Dec 10 12:56:41 crc systemd[1]: Starting Kubernetes Kubelet...
Dec 10 12:56:41 crc restorecon[4649]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 10 12:56:41 crc restorecon[4649]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as customized by admin to system_u:object_r:container_file_t:s0:c442,c857
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 10 12:56:42 crc restorecon[4649]:
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: 
/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 10 12:56:42 crc 
restorecon[4649]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 10 12:56:42 crc restorecon[4649]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 10 12:56:42 crc restorecon[4649]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 10 12:56:42 crc 
restorecon[4649]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc 
restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc 
restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 10 12:56:42 
crc restorecon[4649]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 10 
12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 10 12:56:42 crc restorecon[4649]: 
/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 10 12:56:42 crc restorecon[4649]: 
/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 12:56:42 crc restorecon[4649]:
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 
12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 12:56:42 crc 
restorecon[4649]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 10 12:56:42 crc restorecon[4649]: 
/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917 Dec 10 12:56:42 crc restorecon[4649]: 
/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 10 12:56:42 crc restorecon[4649]: 
/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c37,c572 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 10 12:56:42 crc restorecon[4649]: 
/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 
12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]:
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c133,c223 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 10 12:56:42 crc restorecon[4649]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c682,c947 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 10 12:56:42 crc restorecon[4649]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 10 12:56:42 crc restorecon[4649]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0 Dec 10 12:56:42 crc kubenswrapper[4921]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 10 12:56:42 crc kubenswrapper[4921]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version. Dec 10 12:56:42 crc kubenswrapper[4921]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 10 12:56:42 crc kubenswrapper[4921]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. 
Dec 10 12:56:42 crc kubenswrapper[4921]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI.
Dec 10 12:56:42 crc kubenswrapper[4921]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Dec 10 12:56:42 crc kubenswrapper[4921]: I1210 12:56:42.986465 4921 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime"
Dec 10 12:56:42 crc kubenswrapper[4921]: W1210 12:56:42.990692 4921 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Dec 10 12:56:42 crc kubenswrapper[4921]: W1210 12:56:42.990721 4921 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Dec 10 12:56:42 crc kubenswrapper[4921]: W1210 12:56:42.990726 4921 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Dec 10 12:56:42 crc kubenswrapper[4921]: W1210 12:56:42.990730 4921 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Dec 10 12:56:42 crc kubenswrapper[4921]: W1210 12:56:42.990734 4921 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Dec 10 12:56:42 crc kubenswrapper[4921]: W1210 12:56:42.990737 4921 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Dec 10 12:56:42 crc kubenswrapper[4921]: W1210 12:56:42.990742 4921 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Dec 10 12:56:42 crc kubenswrapper[4921]: W1210 12:56:42.990746 4921 feature_gate.go:330] unrecognized feature gate: PinnedImages
Dec 10 12:56:42 crc kubenswrapper[4921]: W1210 12:56:42.990749 4921 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Dec 10 12:56:42 crc kubenswrapper[4921]: W1210 12:56:42.990753 4921 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Dec 10 12:56:42 crc kubenswrapper[4921]: W1210 12:56:42.990758 4921 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Dec 10 12:56:42 crc kubenswrapper[4921]: W1210 12:56:42.990764 4921 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Dec 10 12:56:42 crc kubenswrapper[4921]: W1210 12:56:42.990769 4921 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Dec 10 12:56:42 crc kubenswrapper[4921]: W1210 12:56:42.990774 4921 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Dec 10 12:56:42 crc kubenswrapper[4921]: W1210 12:56:42.990779 4921 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Dec 10 12:56:42 crc kubenswrapper[4921]: W1210 12:56:42.990784 4921 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Dec 10 12:56:42 crc kubenswrapper[4921]: W1210 12:56:42.990789 4921 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Dec 10 12:56:42 crc kubenswrapper[4921]: W1210 12:56:42.990794 4921 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Dec 10 12:56:42 crc kubenswrapper[4921]: W1210 12:56:42.990798 4921 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Dec 10 12:56:42 crc kubenswrapper[4921]: W1210 12:56:42.990802 4921 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Dec 10 12:56:42 crc kubenswrapper[4921]: W1210 12:56:42.990806 4921 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Dec 10 12:56:42 crc kubenswrapper[4921]: W1210 12:56:42.990809 4921 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Dec 10 12:56:42 crc kubenswrapper[4921]: W1210 12:56:42.990814 4921 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Dec 10 12:56:42 crc kubenswrapper[4921]: W1210 12:56:42.990819 4921 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Dec 10 12:56:42 crc kubenswrapper[4921]: W1210 12:56:42.990822 4921 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Dec 10 12:56:42 crc kubenswrapper[4921]: W1210 12:56:42.990826 4921 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Dec 10 12:56:42 crc kubenswrapper[4921]: W1210 12:56:42.990830 4921 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Dec 10 12:56:42 crc kubenswrapper[4921]: W1210 12:56:42.990844 4921 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Dec 10 12:56:42 crc kubenswrapper[4921]: W1210 12:56:42.990848 4921 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Dec 10 12:56:42 crc kubenswrapper[4921]: W1210 12:56:42.990853 4921 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Dec 10 12:56:42 crc kubenswrapper[4921]: W1210 12:56:42.990857 4921 feature_gate.go:330] unrecognized feature gate: SignatureStores
Dec 10 12:56:42 crc kubenswrapper[4921]: W1210 12:56:42.990861 4921 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Dec 10 12:56:42 crc kubenswrapper[4921]: W1210 12:56:42.990865 4921 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Dec 10 12:56:42 crc kubenswrapper[4921]: W1210 12:56:42.990869 4921 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Dec 10 12:56:42 crc kubenswrapper[4921]: W1210 12:56:42.990872 4921 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Dec 10 12:56:42 crc kubenswrapper[4921]: W1210 12:56:42.990876 4921 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Dec 10 12:56:42 crc kubenswrapper[4921]: W1210 12:56:42.990880 4921 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Dec 10 12:56:42 crc kubenswrapper[4921]: W1210 12:56:42.990884 4921 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Dec 10 12:56:42 crc kubenswrapper[4921]: W1210 12:56:42.990888 4921 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Dec 10 12:56:42 crc kubenswrapper[4921]: W1210 12:56:42.990891 4921 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Dec 10 12:56:42 crc kubenswrapper[4921]: W1210 12:56:42.990895 4921 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Dec 10 12:56:42 crc kubenswrapper[4921]: W1210 12:56:42.990899 4921 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Dec 10 12:56:42 crc kubenswrapper[4921]: W1210 12:56:42.990902 4921 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Dec 10 12:56:42 crc kubenswrapper[4921]: W1210 12:56:42.990906 4921 feature_gate.go:330] unrecognized feature gate: OVNObservability
Dec 10 12:56:42 crc kubenswrapper[4921]: W1210 12:56:42.990911 4921 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Dec 10 12:56:42 crc kubenswrapper[4921]: W1210 12:56:42.990915 4921 feature_gate.go:330] unrecognized feature gate: NewOLM
Dec 10 12:56:42 crc kubenswrapper[4921]: W1210 12:56:42.990919 4921 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Dec 10 12:56:42 crc kubenswrapper[4921]: W1210 12:56:42.990922 4921 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Dec 10 12:56:42 crc kubenswrapper[4921]: W1210 12:56:42.990926 4921 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Dec 10 12:56:42 crc kubenswrapper[4921]: W1210 12:56:42.990930 4921 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Dec 10 12:56:42 crc kubenswrapper[4921]: W1210 12:56:42.990933 4921 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Dec 10 12:56:42 crc kubenswrapper[4921]: W1210 12:56:42.990937 4921 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Dec 10 12:56:42 crc kubenswrapper[4921]: W1210 12:56:42.990940 4921 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Dec 10 12:56:42 crc kubenswrapper[4921]: W1210 12:56:42.990944 4921 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Dec 10 12:56:42 crc kubenswrapper[4921]: W1210 12:56:42.990947 4921 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Dec 10 12:56:42 crc kubenswrapper[4921]: W1210 12:56:42.990951 4921 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Dec 10 12:56:42 crc kubenswrapper[4921]: W1210 12:56:42.990954 4921 feature_gate.go:330] unrecognized feature gate: Example
Dec 10 12:56:42 crc kubenswrapper[4921]: W1210 12:56:42.990957 4921 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Dec 10 12:56:42 crc kubenswrapper[4921]: W1210 12:56:42.990961 4921 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Dec 10 12:56:42 crc kubenswrapper[4921]: W1210 12:56:42.990965 4921 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Dec 10 12:56:42 crc kubenswrapper[4921]: W1210 12:56:42.990968 4921 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Dec 10 12:56:42 crc kubenswrapper[4921]: W1210 12:56:42.990971 4921 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Dec 10 12:56:42 crc kubenswrapper[4921]: W1210 12:56:42.990976 4921 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Dec 10 12:56:42 crc kubenswrapper[4921]: W1210 12:56:42.990982 4921 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
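[Editor's note] These long runs of "unrecognized feature gate" warnings repeat several times below because the gate list is parsed more than once during startup, and component-base's feature_gate.go logs each pass. The OpenShift-specific gate names are simply absent from the kubelet's compiled-in registry, so they are skipped with a warning rather than failing startup; known gates that have gone GA or been deprecated warn that the override will eventually stop working. A simplified sketch of that pattern (not the upstream implementation; the tiny registry and output prefixes are illustrative):

    package main

    import "fmt"

    type maturity string

    const (
    	beta       maturity = "BETA"
    	ga         maturity = "GA"
    	deprecated maturity = "DEPRECATED"
    )

    // A tiny stand-in for the kubelet's compiled-in gate registry.
    var known = map[string]maturity{
    	"CloudDualStackNodeIPs": ga,
    	"KMSv1":                 deprecated,
    	"NodeSwap":              beta,
    }

    // set mirrors the logged behavior: unknown names warn and are
    // ignored, GA/deprecated gates warn that the override goes away.
    func set(enabled map[string]bool, name string, value bool) {
    	m, ok := known[name]
    	if !ok {
    		fmt.Printf("W unrecognized feature gate: %s\n", name)
    		return
    	}
    	switch m {
    	case ga:
    		fmt.Printf("W Setting GA feature gate %s=%t. It will be removed in a future release.\n", name, value)
    	case deprecated:
    		fmt.Printf("W Setting deprecated feature gate %s=%t. It will be removed in a future release.\n", name, value)
    	}
    	enabled[name] = value
    }

    func main() {
    	enabled := map[string]bool{}
    	set(enabled, "UpgradeStatus", true) // OpenShift-only name: warned, skipped
    	set(enabled, "CloudDualStackNodeIPs", true)
    	set(enabled, "KMSv1", true)
    	fmt.Printf("feature gates: %v\n", enabled)
    }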
Dec 10 12:56:42 crc kubenswrapper[4921]: W1210 12:56:42.990986 4921 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Dec 10 12:56:42 crc kubenswrapper[4921]: W1210 12:56:42.990990 4921 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Dec 10 12:56:42 crc kubenswrapper[4921]: W1210 12:56:42.990994 4921 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Dec 10 12:56:42 crc kubenswrapper[4921]: W1210 12:56:42.990998 4921 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Dec 10 12:56:42 crc kubenswrapper[4921]: W1210 12:56:42.991001 4921 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Dec 10 12:56:42 crc kubenswrapper[4921]: W1210 12:56:42.991005 4921 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Dec 10 12:56:42 crc kubenswrapper[4921]: W1210 12:56:42.991009 4921 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Dec 10 12:56:42 crc kubenswrapper[4921]: I1210 12:56:42.991280 4921 flags.go:64] FLAG: --address="0.0.0.0"
Dec 10 12:56:42 crc kubenswrapper[4921]: I1210 12:56:42.991290 4921 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]"
Dec 10 12:56:42 crc kubenswrapper[4921]: I1210 12:56:42.991297 4921 flags.go:64] FLAG: --anonymous-auth="true"
Dec 10 12:56:42 crc kubenswrapper[4921]: I1210 12:56:42.991303 4921 flags.go:64] FLAG: --application-metrics-count-limit="100"
Dec 10 12:56:42 crc kubenswrapper[4921]: I1210 12:56:42.991308 4921 flags.go:64] FLAG: --authentication-token-webhook="false"
Dec 10 12:56:42 crc kubenswrapper[4921]: I1210 12:56:42.991312 4921 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s"
Dec 10 12:56:42 crc kubenswrapper[4921]: I1210 12:56:42.991319 4921 flags.go:64] FLAG: --authorization-mode="AlwaysAllow"
Dec 10 12:56:42 crc kubenswrapper[4921]: I1210 12:56:42.991325 4921 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s"
Dec 10 12:56:42 crc kubenswrapper[4921]: I1210 12:56:42.991329 4921 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s"
Dec 10 12:56:42 crc kubenswrapper[4921]: I1210 12:56:42.991335 4921 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id"
Dec 10 12:56:42 crc kubenswrapper[4921]: I1210 12:56:42.991339 4921 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig"
Dec 10 12:56:42 crc kubenswrapper[4921]: I1210 12:56:42.991344 4921 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki"
Dec 10 12:56:42 crc kubenswrapper[4921]: I1210 12:56:42.991348 4921 flags.go:64] FLAG: --cgroup-driver="cgroupfs"
Dec 10 12:56:42 crc kubenswrapper[4921]: I1210 12:56:42.991352 4921 flags.go:64] FLAG: --cgroup-root=""
Dec 10 12:56:42 crc kubenswrapper[4921]: I1210 12:56:42.991356 4921 flags.go:64] FLAG: --cgroups-per-qos="true"
Dec 10 12:56:42 crc kubenswrapper[4921]: I1210 12:56:42.991360 4921 flags.go:64] FLAG: --client-ca-file=""
Dec 10 12:56:42 crc kubenswrapper[4921]: I1210 12:56:42.991365 4921 flags.go:64] FLAG: --cloud-config=""
Dec 10 12:56:42 crc kubenswrapper[4921]: I1210 12:56:42.991369 4921 flags.go:64] FLAG: --cloud-provider=""
Dec 10 12:56:42 crc kubenswrapper[4921]: I1210 12:56:42.991373 4921 flags.go:64] FLAG: --cluster-dns="[]"
Dec 10 12:56:42 crc kubenswrapper[4921]: I1210 12:56:42.991377 4921 flags.go:64] FLAG: --cluster-domain=""
Dec 10 12:56:42 crc kubenswrapper[4921]: I1210 12:56:42.991381 4921 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf"
Dec 10 12:56:42 crc kubenswrapper[4921]: I1210 12:56:42.991385 4921 flags.go:64] FLAG: --config-dir=""
Dec 10 12:56:42 crc kubenswrapper[4921]: I1210 12:56:42.991402 4921 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json"
Dec 10 12:56:42 crc kubenswrapper[4921]: I1210 12:56:42.991407 4921 flags.go:64] FLAG: --container-log-max-files="5"
Dec 10 12:56:42 crc kubenswrapper[4921]: I1210 12:56:42.991412 4921 flags.go:64] FLAG: --container-log-max-size="10Mi"
Dec 10 12:56:42 crc kubenswrapper[4921]: I1210 12:56:42.991416 4921 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock"
Dec 10 12:56:42 crc kubenswrapper[4921]: I1210 12:56:42.991420 4921 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock"
Dec 10 12:56:42 crc kubenswrapper[4921]: I1210 12:56:42.991425 4921 flags.go:64] FLAG: --containerd-namespace="k8s.io"
Dec 10 12:56:42 crc kubenswrapper[4921]: I1210 12:56:42.991430 4921 flags.go:64] FLAG: --contention-profiling="false"
Dec 10 12:56:42 crc kubenswrapper[4921]: I1210 12:56:42.991435 4921 flags.go:64] FLAG: --cpu-cfs-quota="true"
Dec 10 12:56:42 crc kubenswrapper[4921]: I1210 12:56:42.991440 4921 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms"
Dec 10 12:56:42 crc kubenswrapper[4921]: I1210 12:56:42.991446 4921 flags.go:64] FLAG: --cpu-manager-policy="none"
Dec 10 12:56:42 crc kubenswrapper[4921]: I1210 12:56:42.991452 4921 flags.go:64] FLAG: --cpu-manager-policy-options=""
Dec 10 12:56:42 crc kubenswrapper[4921]: I1210 12:56:42.991458 4921 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s"
Dec 10 12:56:42 crc kubenswrapper[4921]: I1210 12:56:42.991463 4921 flags.go:64] FLAG: --enable-controller-attach-detach="true"
Dec 10 12:56:42 crc kubenswrapper[4921]: I1210 12:56:42.991468 4921 flags.go:64] FLAG: --enable-debugging-handlers="true"
Dec 10 12:56:42 crc kubenswrapper[4921]: I1210 12:56:42.991472 4921 flags.go:64] FLAG: --enable-load-reader="false"
Dec 10 12:56:42 crc kubenswrapper[4921]: I1210 12:56:42.991478 4921 flags.go:64] FLAG: --enable-server="true"
Dec 10 12:56:42 crc kubenswrapper[4921]: I1210 12:56:42.991483 4921 flags.go:64] FLAG: --enforce-node-allocatable="[pods]"
Dec 10 12:56:42 crc kubenswrapper[4921]: I1210 12:56:42.991489 4921 flags.go:64] FLAG: --event-burst="100"
Dec 10 12:56:42 crc kubenswrapper[4921]: I1210 12:56:42.991494 4921 flags.go:64] FLAG: --event-qps="50"
Dec 10 12:56:42 crc kubenswrapper[4921]: I1210 12:56:42.991500 4921 flags.go:64] FLAG: --event-storage-age-limit="default=0"
Dec 10 12:56:42 crc kubenswrapper[4921]: I1210 12:56:42.991505 4921 flags.go:64] FLAG: --event-storage-event-limit="default=0"
Dec 10 12:56:42 crc kubenswrapper[4921]: I1210 12:56:42.991510 4921 flags.go:64] FLAG: --eviction-hard=""
Dec 10 12:56:42 crc kubenswrapper[4921]: I1210 12:56:42.991516 4921 flags.go:64] FLAG: --eviction-max-pod-grace-period="0"
Dec 10 12:56:42 crc kubenswrapper[4921]: I1210 12:56:42.991521 4921 flags.go:64] FLAG: --eviction-minimum-reclaim=""
Dec 10 12:56:42 crc kubenswrapper[4921]: I1210 12:56:42.991525 4921 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s"
Dec 10 12:56:42 crc kubenswrapper[4921]: I1210 12:56:42.991531 4921 flags.go:64] FLAG: --eviction-soft=""
Dec 10 12:56:42 crc kubenswrapper[4921]: I1210 12:56:42.991535 4921 flags.go:64] FLAG: --eviction-soft-grace-period=""
Dec 10 12:56:42 crc kubenswrapper[4921]: I1210 12:56:42.991540 4921 flags.go:64] FLAG: --exit-on-lock-contention="false"
Dec 10 12:56:42 crc kubenswrapper[4921]: I1210 12:56:42.991545 4921 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false"
Dec 10 12:56:42 crc kubenswrapper[4921]: I1210 12:56:42.991549 4921 flags.go:64] FLAG: --experimental-mounter-path=""
Dec 10 12:56:42 crc kubenswrapper[4921]: I1210 12:56:42.991554 4921 flags.go:64] FLAG: --fail-cgroupv1="false"
Dec 10 12:56:42 crc kubenswrapper[4921]: I1210 12:56:42.991559 4921 flags.go:64] FLAG: --fail-swap-on="true"
Dec 10 12:56:42 crc kubenswrapper[4921]: I1210 12:56:42.991563 4921 flags.go:64] FLAG: --feature-gates=""
Dec 10 12:56:42 crc kubenswrapper[4921]: I1210 12:56:42.991569 4921 flags.go:64] FLAG: --file-check-frequency="20s"
Dec 10 12:56:42 crc kubenswrapper[4921]: I1210 12:56:42.991574 4921 flags.go:64] FLAG: --global-housekeeping-interval="1m0s"
Dec 10 12:56:42 crc kubenswrapper[4921]: I1210 12:56:42.991579 4921 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge"
Dec 10 12:56:42 crc kubenswrapper[4921]: I1210 12:56:42.991584 4921 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1"
Dec 10 12:56:42 crc kubenswrapper[4921]: I1210 12:56:42.991589 4921 flags.go:64] FLAG: --healthz-port="10248"
Dec 10 12:56:42 crc kubenswrapper[4921]: I1210 12:56:42.991595 4921 flags.go:64] FLAG: --help="false"
Dec 10 12:56:42 crc kubenswrapper[4921]: I1210 12:56:42.991601 4921 flags.go:64] FLAG: --hostname-override=""
Dec 10 12:56:42 crc kubenswrapper[4921]: I1210 12:56:42.991606 4921 flags.go:64] FLAG: --housekeeping-interval="10s"
Dec 10 12:56:42 crc kubenswrapper[4921]: I1210 12:56:42.991615 4921 flags.go:64] FLAG: --http-check-frequency="20s"
Dec 10 12:56:42 crc kubenswrapper[4921]: I1210 12:56:42.991620 4921 flags.go:64] FLAG: --image-credential-provider-bin-dir=""
Dec 10 12:56:42 crc kubenswrapper[4921]: I1210 12:56:42.991624 4921 flags.go:64] FLAG: --image-credential-provider-config=""
Dec 10 12:56:42 crc kubenswrapper[4921]: I1210 12:56:42.991629 4921 flags.go:64] FLAG: --image-gc-high-threshold="85"
Dec 10 12:56:42 crc kubenswrapper[4921]: I1210 12:56:42.991634 4921 flags.go:64] FLAG: --image-gc-low-threshold="80"
Dec 10 12:56:42 crc kubenswrapper[4921]: I1210 12:56:42.991638 4921 flags.go:64] FLAG: --image-service-endpoint=""
Dec 10 12:56:42 crc kubenswrapper[4921]: I1210 12:56:42.991643 4921 flags.go:64] FLAG: --kernel-memcg-notification="false"
Dec 10 12:56:42 crc kubenswrapper[4921]: I1210 12:56:42.991648 4921 flags.go:64] FLAG: --kube-api-burst="100"
Dec 10 12:56:42 crc kubenswrapper[4921]: I1210 12:56:42.991653 4921 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf"
Dec 10 12:56:42 crc kubenswrapper[4921]: I1210 12:56:42.991657 4921 flags.go:64] FLAG: --kube-api-qps="50"
Dec 10 12:56:42 crc kubenswrapper[4921]: I1210 12:56:42.991663 4921 flags.go:64] FLAG: --kube-reserved=""
Dec 10 12:56:42 crc kubenswrapper[4921]: I1210 12:56:42.991668 4921 flags.go:64] FLAG: --kube-reserved-cgroup=""
Dec 10 12:56:42 crc kubenswrapper[4921]: I1210 12:56:42.991673 4921 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig"
Dec 10 12:56:42 crc kubenswrapper[4921]: I1210 12:56:42.991679 4921 flags.go:64] FLAG: --kubelet-cgroups=""
Dec 10 12:56:42 crc kubenswrapper[4921]: I1210 12:56:42.991683 4921 flags.go:64] FLAG: --local-storage-capacity-isolation="true"
Dec 10 12:56:42 crc kubenswrapper[4921]: I1210 12:56:42.991687 4921 flags.go:64] FLAG: --lock-file=""
Dec 10 12:56:42 crc kubenswrapper[4921]: I1210 12:56:42.991691 4921 flags.go:64] FLAG: --log-cadvisor-usage="false"
Dec 10 12:56:42 crc kubenswrapper[4921]: I1210 12:56:42.991695 4921 flags.go:64] FLAG: --log-flush-frequency="5s"
Dec 10 12:56:42 crc kubenswrapper[4921]: I1210 12:56:42.991700 4921 flags.go:64] FLAG: --log-json-info-buffer-size="0"
Dec 10 12:56:42 crc kubenswrapper[4921]: I1210 12:56:42.991706 4921 flags.go:64] FLAG: --log-json-split-stream="false"
Dec 10 12:56:42 crc kubenswrapper[4921]: I1210 12:56:42.991710 4921 flags.go:64] FLAG: --log-text-info-buffer-size="0"
Dec 10 12:56:42 crc kubenswrapper[4921]: I1210 12:56:42.991714 4921 flags.go:64] FLAG: --log-text-split-stream="false"
Dec 10 12:56:42 crc kubenswrapper[4921]: I1210 12:56:42.991718 4921 flags.go:64] FLAG: --logging-format="text"
Dec 10 12:56:42 crc kubenswrapper[4921]: I1210 12:56:42.991723 4921 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id"
Dec 10 12:56:42 crc kubenswrapper[4921]: I1210 12:56:42.991728 4921 flags.go:64] FLAG: --make-iptables-util-chains="true"
Dec 10 12:56:42 crc kubenswrapper[4921]: I1210 12:56:42.991733 4921 flags.go:64] FLAG: --manifest-url=""
Dec 10 12:56:42 crc kubenswrapper[4921]: I1210 12:56:42.991737 4921 flags.go:64] FLAG: --manifest-url-header=""
Dec 10 12:56:42 crc kubenswrapper[4921]: I1210 12:56:42.991744 4921 flags.go:64] FLAG: --max-housekeeping-interval="15s"
Dec 10 12:56:42 crc kubenswrapper[4921]: I1210 12:56:42.991749 4921 flags.go:64] FLAG: --max-open-files="1000000"
Dec 10 12:56:42 crc kubenswrapper[4921]: I1210 12:56:42.991755 4921 flags.go:64] FLAG: --max-pods="110"
Dec 10 12:56:42 crc kubenswrapper[4921]: I1210 12:56:42.991760 4921 flags.go:64] FLAG: --maximum-dead-containers="-1"
Dec 10 12:56:42 crc kubenswrapper[4921]: I1210 12:56:42.991765 4921 flags.go:64] FLAG: --maximum-dead-containers-per-container="1"
Dec 10 12:56:42 crc kubenswrapper[4921]: I1210 12:56:42.991770 4921 flags.go:64] FLAG: --memory-manager-policy="None"
Dec 10 12:56:42 crc kubenswrapper[4921]: I1210 12:56:42.991775 4921 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s"
Dec 10 12:56:42 crc kubenswrapper[4921]: I1210 12:56:42.991781 4921 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s"
Dec 10 12:56:42 crc kubenswrapper[4921]: I1210 12:56:42.991786 4921 flags.go:64] FLAG: --node-ip="192.168.126.11"
Dec 10 12:56:42 crc kubenswrapper[4921]: I1210 12:56:42.991790 4921 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos"
Dec 10 12:56:42 crc kubenswrapper[4921]: I1210 12:56:42.991802 4921 flags.go:64] FLAG: --node-status-max-images="50"
Dec 10 12:56:42 crc kubenswrapper[4921]: I1210 12:56:42.991806 4921 flags.go:64] FLAG: --node-status-update-frequency="10s"
Dec 10 12:56:42 crc kubenswrapper[4921]: I1210 12:56:42.991812 4921 flags.go:64] FLAG: --oom-score-adj="-999"
Dec 10 12:56:42 crc kubenswrapper[4921]: I1210 12:56:42.991817 4921 flags.go:64] FLAG: --pod-cidr=""
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:42.991823 4921 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d"
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:42.991831 4921 flags.go:64] FLAG: --pod-manifest-path=""
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:42.991835 4921 flags.go:64] FLAG: --pod-max-pids="-1"
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:42.991840 4921 flags.go:64] FLAG: --pods-per-core="0"
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:42.991844 4921 flags.go:64] FLAG: --port="10250"
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:42.991848 4921 flags.go:64] FLAG: --protect-kernel-defaults="false"
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:42.991853 4921 flags.go:64] FLAG: --provider-id=""
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:42.991857 4921 flags.go:64] FLAG: --qos-reserved=""
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:42.991861 4921 flags.go:64] FLAG: --read-only-port="10255"
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:42.991865 4921 flags.go:64] FLAG: --register-node="true"
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:42.991869 4921 flags.go:64] FLAG: --register-schedulable="true"
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:42.991873 4921 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule"
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:42.991880 4921 flags.go:64] FLAG: --registry-burst="10"
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:42.991884 4921 flags.go:64] FLAG: --registry-qps="5"
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:42.991888 4921 flags.go:64] FLAG: --reserved-cpus=""
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:42.991893 4921 flags.go:64] FLAG: --reserved-memory=""
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:42.991899 4921 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf"
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:42.991903 4921 flags.go:64] FLAG: --root-dir="/var/lib/kubelet"
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:42.991908 4921 flags.go:64] FLAG: --rotate-certificates="false"
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:42.991912 4921 flags.go:64] FLAG: --rotate-server-certificates="false"
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:42.991916 4921 flags.go:64] FLAG: --runonce="false"
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:42.991920 4921 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service"
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:42.991924 4921 flags.go:64] FLAG: --runtime-request-timeout="2m0s"
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:42.991928 4921 flags.go:64] FLAG: --seccomp-default="false"
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:42.991932 4921 flags.go:64] FLAG: --serialize-image-pulls="true"
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:42.991936 4921 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s"
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:42.991940 4921 flags.go:64] FLAG: --storage-driver-db="cadvisor"
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:42.991944 4921 flags.go:64] FLAG: --storage-driver-host="localhost:8086"
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:42.991949 4921 flags.go:64] FLAG: --storage-driver-password="root"
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:42.991953 4921 flags.go:64] FLAG: --storage-driver-secure="false"
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:42.991958 4921 flags.go:64] FLAG: --storage-driver-table="stats"
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:42.991962 4921 flags.go:64] FLAG: --storage-driver-user="root"
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:42.991969 4921 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s"
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:42.991975 4921 flags.go:64] FLAG: --sync-frequency="1m0s"
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:42.991989 4921 flags.go:64] FLAG: --system-cgroups=""
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:42.991994 4921 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi"
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:42.992002 4921 flags.go:64] FLAG: --system-reserved-cgroup=""
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:42.992008 4921 flags.go:64] FLAG: --tls-cert-file=""
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:42.992013 4921 flags.go:64] FLAG: --tls-cipher-suites="[]"
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:42.992021 4921 flags.go:64] FLAG: --tls-min-version=""
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:42.992027 4921 flags.go:64] FLAG: --tls-private-key-file=""
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:42.992032 4921 flags.go:64] FLAG: --topology-manager-policy="none"
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:42.992037 4921 flags.go:64] FLAG: --topology-manager-policy-options=""
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:42.992042 4921 flags.go:64] FLAG: --topology-manager-scope="container"
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:42.992048 4921 flags.go:64] FLAG: --v="2"
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:42.992055 4921 flags.go:64] FLAG: --version="false"
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:42.992062 4921 flags.go:64] FLAG: --vmodule=""
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:42.992067 4921 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec"
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:42.992071 4921 flags.go:64] FLAG: --volume-stats-agg-period="1m0s"
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:42.992169 4921 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:42.992173 4921 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:42.992177 4921 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:42.992181 4921 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:42.992185 4921 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:42.992189 4921 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:42.992193 4921 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:42.992197 4921 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:42.992201 4921 feature_gate.go:330] unrecognized feature gate: SignatureStores
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:42.992205 4921 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
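[Editor's note] The "flags.go:64] FLAG: --name=\"value\"" block above is a verbose (--v=2) dump of every registered flag after parsing, whether or not it was set on the command line. The same inventory can be produced with pflag's VisitAll; a short sketch (flag set and flag names illustrative, not the kubelet's actual registration code):

    package main

    import (
    	"fmt"

    	"github.com/spf13/pflag"
    )

    func main() {
    	fs := pflag.NewFlagSet("kubelet-demo", pflag.ContinueOnError)
    	fs.String("node-ip", "", "node IP address")
    	fs.Int32("max-pods", 110, "maximum number of pods")
    	_ = fs.Parse([]string{"--node-ip=192.168.126.11"})

    	// VisitAll walks every registered flag, set or not, which is
    	// how a complete "FLAG: --name=value" startup inventory like
    	// the one above can be emitted.
    	fs.VisitAll(func(f *pflag.Flag) {
    		fmt.Printf("FLAG: --%s=%q\n", f.Name, f.Value.String())
    	})
    }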
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:42.992210 4921 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:42.992214 4921 feature_gate.go:330] unrecognized feature gate: PinnedImages
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:42.992218 4921 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:42.992223 4921 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:42.992227 4921 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:42.992230 4921 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:42.992234 4921 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:42.992240 4921 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:42.992244 4921 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:42.992248 4921 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:42.992252 4921 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:42.992257 4921 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:42.992261 4921 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:42.992265 4921 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:42.992268 4921 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:42.992271 4921 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:42.992275 4921 feature_gate.go:330] unrecognized feature gate: NewOLM
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:42.992278 4921 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:42.992282 4921 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:42.992285 4921 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:42.992290 4921 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:42.992294 4921 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:42.992299 4921 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:42.992303 4921 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:42.992307 4921 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:42.992310 4921 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:42.992314 4921 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:42.992318 4921 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:42.992321 4921 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:42.992325 4921 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:42.992328 4921 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:42.992331 4921 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:42.992335 4921 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:42.992339 4921 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
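[Editor's note] Every kubenswrapper entry in this log uses klog's header layout after the journald prefix: a severity letter (I/W/E/F), MMDD, HH:MM:SS.microseconds, the PID, then source file:line] and the message. A throwaway parser (purely illustrative, not part of any kubelet tooling) makes the fields explicit:

    package main

    import (
    	"fmt"
    	"regexp"
    )

    // klog header: Lmmdd hh:mm:ss.uuuuuu threadid file:line] msg
    var klogRe = regexp.MustCompile(`^([IWEF])(\d{4}) (\d{2}:\d{2}:\d{2}\.\d+)\s+(\d+)\s+([\w.]+\.go):(\d+)\] (.*)$`)

    func main() {
    	line := `W1210 12:56:42.990692 4921 feature_gate.go:330] unrecognized feature gate: UpgradeStatus`
    	m := klogRe.FindStringSubmatch(line)
    	if m == nil {
    		fmt.Println("not a klog line")
    		return
    	}
    	fmt.Printf("severity=%s date=%s time=%s pid=%s file=%s line=%s msg=%q\n",
    		m[1], m[2], m[3], m[4], m[5], m[6], m[7])
    }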
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:42.992344 4921 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:42.992348 4921 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:42.992352 4921 feature_gate.go:330] unrecognized feature gate: OVNObservability
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:42.992356 4921 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:42.992359 4921 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:42.992366 4921 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:42.992369 4921 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:42.992373 4921 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:42.992376 4921 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:42.992380 4921 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:42.992383 4921 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:42.992418 4921 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:42.992423 4921 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:42.992427 4921 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:42.992431 4921 feature_gate.go:330] unrecognized feature gate: Example
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:42.992434 4921 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:42.992437 4921 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:42.992441 4921 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:42.992444 4921 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:42.992448 4921 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:42.992451 4921 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:42.992455 4921 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:42.992458 4921 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:42.992461 4921 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:42.992465 4921 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:42.992468 4921 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:42.992472 4921 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:42.992479 4921 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.007079 4921 server.go:491] "Kubelet version" kubeletVersion="v1.31.5"
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.007122 4921 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.007248 4921 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.007262 4921 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.007272 4921 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.007282 4921 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.007292 4921 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.007300 4921 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.007307 4921 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.007315 4921 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.007323 4921 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.007331 4921 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.007339 4921 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.007347 4921 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.007355 4921 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.007363 4921 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.007371 4921 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.007379 4921 feature_gate.go:330] unrecognized feature gate: OVNObservability
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.007422 4921 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.007434 4921 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.007444 4921 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.007455 4921 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.007464 4921 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.007472 4921 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.007479 4921 feature_gate.go:330] unrecognized feature gate: PinnedImages
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.007487 4921 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.007495 4921 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.007506 4921 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.007519 4921 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.007529 4921 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.007537 4921 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.007548 4921 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.007560 4921 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.007569 4921 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.007577 4921 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.007589 4921 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.007599 4921 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.007608 4921 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.007618 4921 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.007628 4921 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.007637 4921 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.007645 4921 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.007653 4921 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.007662 4921 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.007670 4921 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.007678 4921 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.007686 4921 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.007694 4921 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.007702 4921 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.007709 4921 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.007717 4921 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.007725 4921 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.007734 4921 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.007741 4921 feature_gate.go:330] unrecognized feature gate: NewOLM
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.007749 4921 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.007757 4921 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.007765 4921 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.007773 4921 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.007780 4921 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.007788 4921 feature_gate.go:330] unrecognized feature gate: SignatureStores
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.007796 4921 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.007804 4921 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.007811 4921 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.007820 4921 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.007828 4921 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.007836 4921 feature_gate.go:330] unrecognized feature gate: Example
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.007844 4921 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.007852 4921 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.007860 4921 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.007868 4921 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.007875 4921 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.007883 4921 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.007891 4921 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.007904 4921 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.008138 4921 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.008152 4921 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.008161 4921 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.008169 4921 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.008178 4921 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.008187 4921 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.008195 4921 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.008203 4921 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.008211 4921 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.008219 4921 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.008227 4921 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.008234 4921 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.008242 4921 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.008251 4921 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.008259 4921 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.008266 4921 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.008277 4921 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.008288 4921 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.008296 4921 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.008306 4921 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.008316 4921 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.008326 4921 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.008335 4921 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.008344 4921 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.008353 4921 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.008361 4921 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.008371 4921 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.008379 4921 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.008422 4921 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.008434 4921 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.008444 4921 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.008452 4921 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.008460 4921 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.008470 4921 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.008478 4921 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.008487 4921 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.008494 4921 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.008505 4921 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.008513 4921 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.008520 4921 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.008528 4921 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.008536 4921 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.008544 4921 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.008553 4921 feature_gate.go:330] unrecognized feature gate: NewOLM
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.008560 4921 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.008571 4921 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.008582 4921 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.008590 4921 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.008597 4921 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.008605 4921 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.008613 4921 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.008621 4921 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.008629 4921 feature_gate.go:330] unrecognized feature gate: PinnedImages
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.008637 4921 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.008644 4921 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.008652 4921 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.008661 4921 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.008669 4921 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.008676 4921 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.008684 4921 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.008691 4921 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.008699 4921 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.008706 4921 feature_gate.go:330] unrecognized feature gate: SignatureStores
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.008714 4921 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.008721 4921 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.008729 4921 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.008737 4921 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.008744 4921 feature_gate.go:330] unrecognized feature gate: Example
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.008752 4921 feature_gate.go:330] unrecognized feature gate: OVNObservability
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.008759 4921 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.008767 4921 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.008781 4921 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.009041 4921 server.go:940] "Client rotation is on, will bootstrap in background"
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.013370 4921 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary"
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.013578 4921 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem".
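[Editor's note] The certificate_manager entries just below show the client-certificate rotation math: the certificate expires 2026-02-24, but the manager picks a jittered deadline (2025-12-12) well before expiry and sleeps until then (the "Waiting 58h35m…" line), so a fleet of kubelets does not rotate in lockstep. A sketch of the idea; the [0.7, 0.9] jitter window and the notBefore date (assumed one-year validity) are illustrative assumptions, the real constants live in k8s.io/client-go's certificate manager:

    package main

    import (
    	"fmt"
    	"math/rand"
    	"time"
    )

    // nextRotationDeadline picks a random point late in the
    // certificate's validity window. The [0.7, 0.9] range is an
    // illustrative assumption, not the upstream constant.
    func nextRotationDeadline(notBefore, notAfter time.Time) time.Time {
    	total := notAfter.Sub(notBefore)
    	jittered := time.Duration(float64(total) * (0.7 + 0.2*rand.Float64()))
    	return notBefore.Add(jittered)
    }

    func main() {
    	// Assumed issuance date; the log only shows the expiration.
    	notBefore := time.Date(2025, 2, 24, 5, 52, 8, 0, time.UTC)
    	notAfter := time.Date(2026, 2, 24, 5, 52, 8, 0, time.UTC)
    	deadline := nextRotationDeadline(notBefore, notAfter)
    	fmt.Printf("rotation deadline is %s\n", deadline)
    	fmt.Printf("Waiting %s for next certificate rotation\n", time.Until(deadline))
    }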
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.014482 4921 server.go:997] "Starting client certificate rotation"
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.014526 4921 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.014756 4921 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2025-12-12 23:31:43.529148863 +0000 UTC
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.014853 4921 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 58h35m0.514301836s for next certificate rotation
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.040728 4921 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.043920 4921 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.054681 4921 log.go:25] "Validated CRI v1 runtime API"
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.069914 4921 log.go:25] "Validated CRI v1 image API"
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.072462 4921 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd"
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.077097 4921 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2025-12-10-12-50-18-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3]
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.077150 4921 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:42 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:43 fsType:tmpfs blockSize:0}]
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.093998 4921 manager.go:217] Machine: {Timestamp:2025-12-10 12:56:43.092786443 +0000 UTC m=+0.309008377 CPUVendorID:AuthenticAMD NumCores:8 NumPhysicalCores:1 NumSockets:8 CpuFrequency:2800000 MemoryCapacity:25199480832 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:539c9d38-f260-4af7-b6c3-f4170bf93c3e BootID:aa6d129a-c0be-471d-913f-2184d68fb040 Filesystems:[{Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:43 Capacity:1073741824 Type:vfs Inodes:3076108 HasInodes:true} {Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:12599738368 Type:vfs Inodes:3076108 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:5039898624 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:12599742464 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:42 Capacity:2519945216 Type:vfs Inodes:615221 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:429496729600 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:3b:a1:e5 Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:3b:a1:e5 Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:5d:41:95 Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:7c:cf:12 Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:f3:8d:c8 Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:4c:f6:e5 Speed:-1 Mtu:1496} {Name:ens7.23 MacAddress:52:54:00:76:27:25 Speed:-1 Mtu:1496} {Name:eth10 MacAddress:3a:0d:8d:b2:e1:5f Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:62:fb:59:57:30:a5 Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:25199480832 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None}
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.094236 4921 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available.
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.094458 4921 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:}
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.095138 4921 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority"
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.095328 4921 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[]
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.095371 4921 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2}
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.095655 4921 topology_manager.go:138] "Creating topology manager with none policy"
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.095668 4921 container_manager_linux.go:303] "Creating device plugin manager"
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.095866 4921 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock"
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.095897 4921 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock"
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.096234 4921 state_mem.go:36] "Initialized new in-memory state store"
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.096331 4921 server.go:1245] "Using root directory" path="/var/lib/kubelet"
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.097014 4921 kubelet.go:418] "Attempting to sync node with API server"
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.097041 4921 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests"
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.097067 4921 file.go:69] "Watching path" path="/etc/kubernetes/manifests"
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.097084 4921 kubelet.go:324] "Adding apiserver pod source"
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.097101 4921 apiserver.go:42] "Waiting for node sync before watching apiserver pods"
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.099517 4921 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1"
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.100078 4921 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem".
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.101110 4921 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode"
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.101599 4921 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.182:6443: connect: connection refused
Dec 10 12:56:43 crc kubenswrapper[4921]: E1210 12:56:43.101802 4921 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.182:6443: connect: connection refused" logger="UnhandledError"
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.101603 4921 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.182:6443: connect: connection refused
Dec 10 12:56:43 crc kubenswrapper[4921]: E1210 12:56:43.101923 4921 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.182:6443: connect: connection refused" logger="UnhandledError"
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.102213 4921 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume"
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.102268 4921 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir"
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.102288 4921 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo"
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.102305 4921 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path"
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.102335 4921 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs"
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.102352 4921 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret"
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.102370 4921 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi"
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.102432 4921 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api"
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.102454 4921 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc"
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.102471 4921 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap"
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.102494 4921 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected"
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.102512 4921 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume"
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.102835 4921 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi"
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.103796 4921 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.182:6443: connect: connection refused
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.104226 4921 server.go:1280] "Started kubelet"
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.104739 4921 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.105016 4921 server.go:163] "Starting to listen" address="0.0.0.0" port=10250
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.105574 4921 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock"
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.106633 4921 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.106695 4921 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer"
Dec 10 12:56:43 crc systemd[1]: Started Kubernetes Kubelet.
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.107309 4921 volume_manager.go:287] "The desired_state_of_world populator starts"
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.107323 4921 volume_manager.go:289] "Starting Kubelet Volume Manager"
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.107440 4921 desired_state_of_world_populator.go:146] "Desired state populator starts to run"
Dec 10 12:56:43 crc kubenswrapper[4921]: E1210 12:56:43.107146 4921 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.182:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.187fdbf0129eb47a default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-10 12:56:43.103548538 +0000 UTC m=+0.319770492,LastTimestamp:2025-12-10 12:56:43.103548538 +0000 UTC m=+0.319770492,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}"
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.107837 4921 server.go:460] "Adding debug handlers to kubelet server"
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.108534 4921 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-24 20:41:56.598411663 +0000 UTC
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.108584 4921 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 343h45m13.489832216s for next certificate rotation
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.109040 4921 factory.go:55] Registering systemd factory
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.109067 4921 factory.go:221] Registration of the systemd container factory successfully
Dec 10 12:56:43 crc kubenswrapper[4921]: E1210 12:56:43.109593 4921 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Dec 10 12:56:43 crc kubenswrapper[4921]: E1210 12:56:43.109754 4921 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.182:6443: connect: connection refused" interval="200ms"
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.109932 4921 factory.go:153] Registering CRI-O factory
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.109957 4921 factory.go:221] Registration of the crio container factory successfully
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.110050 4921 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.110085 4921 factory.go:103] Registering Raw factory
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.110110 4921 manager.go:1196] Started watching for new ooms in manager
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.111796 4921 manager.go:319] Starting recovery of all containers
Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.115832 4921 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.182:6443: connect: connection refused
Dec 10 12:56:43 crc kubenswrapper[4921]: E1210 12:56:43.115924 4921 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.182:6443: connect: connection refused" logger="UnhandledError"
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.115288 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext=""
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.116229 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext=""
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.118693 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext=""
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.118723 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext=""
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.118758 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext=""
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.118781 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext=""
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.118807 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext=""
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.118836 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext=""
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.118860 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext=""
seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.118887 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.118908 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.118937 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.119000 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.119041 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.119068 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.119100 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.119128 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.119154 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.119192 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.119220 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" 
seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.119247 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.119267 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.119290 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.119318 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.119339 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.119365 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.119440 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.119475 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.119497 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.119525 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.119551 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext="" Dec 10 
12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.119587 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.119617 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.119643 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.119673 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.119702 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.119738 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.119769 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.119795 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.119834 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.119864 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.120343 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext="" Dec 10 12:56:43 crc 
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.121445 4921 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount"
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.121504 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext=""
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.121530 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext=""
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.121561 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext=""
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.121585 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext=""
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.121613 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext=""
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.121635 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext=""
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.121656 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext=""
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.121684 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext=""
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.121705 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext=""
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.121733 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext=""
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.121766 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext=""
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.121803 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext=""
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.121836 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext=""
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.121861 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext=""
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.121902 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext=""
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.121928 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext=""
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.121966 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext=""
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.121989 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext=""
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.122017 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext=""
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.122042 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext=""
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.122063 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext=""
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.122089 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext=""
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.122121 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext=""
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.122150 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext=""
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.122178 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext=""
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.122201 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext=""
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.122232 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext=""
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.122256 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext=""
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.122279 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext=""
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.122305 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext=""
Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.122587 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext=""
volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.124800 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.124816 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.124832 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.124847 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.124861 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.124874 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.124888 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.124903 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.124917 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.124932 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.124947 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" 
volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.124962 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.124979 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.124997 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.125014 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.125030 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.125043 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.125064 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.125089 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.125103 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.125119 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.125139 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" 
volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.125155 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.125169 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.125182 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.125197 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.125222 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.125236 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.125250 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.125265 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.125288 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.125304 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.125326 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" 
volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.125342 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.125356 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.125372 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.125402 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.125418 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.125433 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.125446 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.125459 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.125477 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.125489 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.125503 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" 
volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.125518 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.125532 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.125545 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.125560 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.125573 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.125622 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.125637 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.125651 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.125666 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.125678 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.125691 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" 
volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.125712 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.125725 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.125740 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.125752 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.125767 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.125782 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.125812 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.125824 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.125837 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.125847 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.125863 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" 
volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.125874 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.125886 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.125896 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.125906 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.125918 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.125928 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.125938 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.125948 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.125957 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.125969 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.125981 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" 
volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.125992 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.126002 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.126014 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.126026 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.126036 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.126048 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.126063 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.126077 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.126093 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.126105 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.126115 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" 
volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.126126 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.126165 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.126178 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.126191 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.126204 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.126215 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.126226 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.126238 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.126250 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.126261 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.126275 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" 
volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.126287 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.126299 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.126311 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.126322 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.126334 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.126344 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.126356 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.126368 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.126378 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.126419 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.126432 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" 
volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.126445 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.126457 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.126471 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.126484 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.126496 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.126509 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.126523 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.126540 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.126554 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.126567 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.126582 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" 
volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.126595 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.126607 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.126620 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.126635 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.126647 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.126660 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.126672 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.126686 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.126701 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.126713 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.126726 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" 
volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.126740 4921 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext="" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.126752 4921 reconstruct.go:97] "Volume reconstruction finished" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.126763 4921 reconciler.go:26] "Reconciler: start to sync state" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.136098 4921 manager.go:324] Recovery completed Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.157883 4921 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.160898 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.160969 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.160987 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.161699 4921 cpu_manager.go:225] "Starting CPU manager" policy="none" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.161735 4921 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.161762 4921 state_mem.go:36] "Initialized new in-memory state store" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.173332 4921 policy_none.go:49] "None policy: Start" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.180338 4921 memory_manager.go:170] "Starting memorymanager" policy="None" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.180414 4921 state_mem.go:35] "Initializing new in-memory state store" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.187065 4921 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.191462 4921 kubelet_network_linux.go:50] "Initialized iptables rules." 
protocol="IPv6" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.191519 4921 status_manager.go:217] "Starting to sync pod status with apiserver" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.191545 4921 kubelet.go:2335] "Starting kubelet main sync loop" Dec 10 12:56:43 crc kubenswrapper[4921]: E1210 12:56:43.191599 4921 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.194815 4921 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.182:6443: connect: connection refused Dec 10 12:56:43 crc kubenswrapper[4921]: E1210 12:56:43.194972 4921 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.182:6443: connect: connection refused" logger="UnhandledError" Dec 10 12:56:43 crc kubenswrapper[4921]: E1210 12:56:43.210673 4921 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.258779 4921 manager.go:334] "Starting Device Plugin manager" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.258851 4921 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.258869 4921 server.go:79] "Starting device plugin registration server" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.259690 4921 eviction_manager.go:189] "Eviction manager: starting control loop" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.259712 4921 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.260173 4921 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.260276 4921 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.260290 4921 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Dec 10 12:56:43 crc kubenswrapper[4921]: E1210 12:56:43.278614 4921 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.292697 4921 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc"] Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.292819 4921 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.294090 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.294140 4921 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.294153 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.294360 4921 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.294734 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.294789 4921 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.295593 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.295625 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.295635 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.295693 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.295715 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.295730 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.295757 4921 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.296004 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.296066 4921 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.296505 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.296539 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.296553 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.296700 4921 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.296887 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.296919 4921 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.297616 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.297640 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.297647 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.297792 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.297820 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.297838 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.297848 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.297871 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.297882 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.297998 4921 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.298225 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.298256 4921 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.299243 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.299276 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.299287 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.299517 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.299541 4921 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.299725 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.299762 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.299776 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.300313 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.300343 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.300354 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:56:43 crc kubenswrapper[4921]: E1210 12:56:43.311471 4921 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.182:6443: connect: connection refused" interval="400ms" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.328428 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.328494 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.328521 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.328548 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.328576 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " 
pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.328603 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.328633 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.328660 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.328683 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.328707 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.328731 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.328753 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.328774 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.328807 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.328829 4921 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.360076 4921 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.361608 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.361659 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.361671 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.361699 4921 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 10 12:56:43 crc kubenswrapper[4921]: E1210 12:56:43.362434 4921 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.182:6443: connect: connection refused" node="crc" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.430298 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.430360 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.430418 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.430454 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.430485 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.430513 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod 
\"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.430546 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.430551 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.430545 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.430605 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.430589 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.430734 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.430737 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.430833 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.430864 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 
12:56:43.430897 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.430927 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.430968 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.430979 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.430981 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.431001 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.431014 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.431032 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.431025 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.431054 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.431061 4921 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.431075 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.431106 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.431142 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.431122 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.562652 4921 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.565046 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.565095 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.565105 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.565137 4921 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 10 12:56:43 crc kubenswrapper[4921]: E1210 12:56:43.565740 4921 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.182:6443: connect: connection refused" node="crc" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.627916 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.636279 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.664387 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.664787 4921 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-b441c3ced3171879085e0cf8ff77fa8ddb04d26a57b7e83232fb5922fe1a2239 WatchSource:0}: Error finding container b441c3ced3171879085e0cf8ff77fa8ddb04d26a57b7e83232fb5922fe1a2239: Status 404 returned error can't find the container with id b441c3ced3171879085e0cf8ff77fa8ddb04d26a57b7e83232fb5922fe1a2239 Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.667693 4921 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-1c210c44f9697b1347153a6f3a051b6d7dddcac71346be5aa0bedc71d53e44be WatchSource:0}: Error finding container 1c210c44f9697b1347153a6f3a051b6d7dddcac71346be5aa0bedc71d53e44be: Status 404 returned error can't find the container with id 1c210c44f9697b1347153a6f3a051b6d7dddcac71346be5aa0bedc71d53e44be Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.681750 4921 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-8e17ccd6d7620a53b86eb29f6a98739d7a90e42218fa3edb2e3d93d188b7db04 WatchSource:0}: Error finding container 8e17ccd6d7620a53b86eb29f6a98739d7a90e42218fa3edb2e3d93d188b7db04: Status 404 returned error can't find the container with id 8e17ccd6d7620a53b86eb29f6a98739d7a90e42218fa3edb2e3d93d188b7db04 Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.690516 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.699035 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.706087 4921 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-02b5fe831b23acd36e0848b8a86118296a768c917dea7f4e0819804932419870 WatchSource:0}: Error finding container 02b5fe831b23acd36e0848b8a86118296a768c917dea7f4e0819804932419870: Status 404 returned error can't find the container with id 02b5fe831b23acd36e0848b8a86118296a768c917dea7f4e0819804932419870 Dec 10 12:56:43 crc kubenswrapper[4921]: W1210 12:56:43.710123 4921 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-be188414d3f0fc27500bd51e177c38d1d03a670a0b381e2d08ef416c5969b31a WatchSource:0}: Error finding container be188414d3f0fc27500bd51e177c38d1d03a670a0b381e2d08ef416c5969b31a: Status 404 returned error can't find the container with id be188414d3f0fc27500bd51e177c38d1d03a670a0b381e2d08ef416c5969b31a Dec 10 12:56:43 crc kubenswrapper[4921]: E1210 12:56:43.713697 4921 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.182:6443: connect: connection refused" interval="800ms" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.966651 4921 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.968124 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.968169 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.968183 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:56:43 crc kubenswrapper[4921]: I1210 12:56:43.968210 4921 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 10 12:56:43 crc kubenswrapper[4921]: E1210 12:56:43.968747 4921 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.182:6443: connect: connection refused" node="crc" Dec 10 12:56:44 crc kubenswrapper[4921]: W1210 12:56:44.058032 4921 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.182:6443: connect: connection refused Dec 10 12:56:44 crc kubenswrapper[4921]: E1210 12:56:44.058490 4921 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.182:6443: connect: connection refused" logger="UnhandledError" Dec 10 12:56:44 crc kubenswrapper[4921]: W1210 12:56:44.066420 4921 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get 
"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.182:6443: connect: connection refused Dec 10 12:56:44 crc kubenswrapper[4921]: E1210 12:56:44.066497 4921 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.182:6443: connect: connection refused" logger="UnhandledError" Dec 10 12:56:44 crc kubenswrapper[4921]: I1210 12:56:44.105715 4921 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.182:6443: connect: connection refused Dec 10 12:56:44 crc kubenswrapper[4921]: W1210 12:56:44.181031 4921 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.182:6443: connect: connection refused Dec 10 12:56:44 crc kubenswrapper[4921]: E1210 12:56:44.181141 4921 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.182:6443: connect: connection refused" logger="UnhandledError" Dec 10 12:56:44 crc kubenswrapper[4921]: I1210 12:56:44.200422 4921 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="bee74fc4c681cc10c5a460c807659272e393e19173109e82ef65371c5b363ce1" exitCode=0 Dec 10 12:56:44 crc kubenswrapper[4921]: I1210 12:56:44.200459 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"bee74fc4c681cc10c5a460c807659272e393e19173109e82ef65371c5b363ce1"} Dec 10 12:56:44 crc kubenswrapper[4921]: I1210 12:56:44.200703 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"8e17ccd6d7620a53b86eb29f6a98739d7a90e42218fa3edb2e3d93d188b7db04"} Dec 10 12:56:44 crc kubenswrapper[4921]: I1210 12:56:44.200902 4921 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 12:56:44 crc kubenswrapper[4921]: I1210 12:56:44.203104 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:56:44 crc kubenswrapper[4921]: I1210 12:56:44.203141 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:56:44 crc kubenswrapper[4921]: I1210 12:56:44.203153 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:56:44 crc kubenswrapper[4921]: I1210 12:56:44.205706 4921 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="534968b5f5d9e7b3063c91a3e0b68ba04d83e2cb65ab688b23d284adc6852155" exitCode=0 Dec 10 12:56:44 crc kubenswrapper[4921]: I1210 12:56:44.205793 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"534968b5f5d9e7b3063c91a3e0b68ba04d83e2cb65ab688b23d284adc6852155"} Dec 10 12:56:44 crc kubenswrapper[4921]: I1210 12:56:44.205873 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"1c210c44f9697b1347153a6f3a051b6d7dddcac71346be5aa0bedc71d53e44be"} Dec 10 12:56:44 crc kubenswrapper[4921]: I1210 12:56:44.206059 4921 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 12:56:44 crc kubenswrapper[4921]: I1210 12:56:44.206884 4921 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 12:56:44 crc kubenswrapper[4921]: I1210 12:56:44.207472 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:56:44 crc kubenswrapper[4921]: I1210 12:56:44.207575 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:56:44 crc kubenswrapper[4921]: I1210 12:56:44.207630 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:56:44 crc kubenswrapper[4921]: I1210 12:56:44.216537 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:56:44 crc kubenswrapper[4921]: I1210 12:56:44.216579 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:56:44 crc kubenswrapper[4921]: I1210 12:56:44.216596 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:56:44 crc kubenswrapper[4921]: I1210 12:56:44.216902 4921 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="dc40d8e5cb90e8e3bdccb7f5f5e59a1fc468fef046deda2bdf07092cb9658a84" exitCode=0 Dec 10 12:56:44 crc kubenswrapper[4921]: I1210 12:56:44.216986 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"dc40d8e5cb90e8e3bdccb7f5f5e59a1fc468fef046deda2bdf07092cb9658a84"} Dec 10 12:56:44 crc kubenswrapper[4921]: I1210 12:56:44.217076 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"b441c3ced3171879085e0cf8ff77fa8ddb04d26a57b7e83232fb5922fe1a2239"} Dec 10 12:56:44 crc kubenswrapper[4921]: I1210 12:56:44.217214 4921 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 12:56:44 crc kubenswrapper[4921]: I1210 12:56:44.218838 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:56:44 crc kubenswrapper[4921]: I1210 12:56:44.218868 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:56:44 crc kubenswrapper[4921]: I1210 12:56:44.218878 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:56:44 crc kubenswrapper[4921]: I1210 12:56:44.220016 4921 generic.go:334] "Generic (PLEG): container finished" 
podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="14f71d57ff5277b39ee8d2960b247b98bd9d7ee9993d0fdcb6338c2386b1bc6f" exitCode=0 Dec 10 12:56:44 crc kubenswrapper[4921]: I1210 12:56:44.220104 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"14f71d57ff5277b39ee8d2960b247b98bd9d7ee9993d0fdcb6338c2386b1bc6f"} Dec 10 12:56:44 crc kubenswrapper[4921]: I1210 12:56:44.220154 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"be188414d3f0fc27500bd51e177c38d1d03a670a0b381e2d08ef416c5969b31a"} Dec 10 12:56:44 crc kubenswrapper[4921]: I1210 12:56:44.220253 4921 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 12:56:44 crc kubenswrapper[4921]: I1210 12:56:44.221479 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:56:44 crc kubenswrapper[4921]: I1210 12:56:44.221521 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:56:44 crc kubenswrapper[4921]: I1210 12:56:44.221533 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:56:44 crc kubenswrapper[4921]: I1210 12:56:44.223076 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"5a4716beddbcd24e8418830aa5494cffffc21272e45e30bd15cfe58bfc07c543"} Dec 10 12:56:44 crc kubenswrapper[4921]: I1210 12:56:44.223105 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"02b5fe831b23acd36e0848b8a86118296a768c917dea7f4e0819804932419870"} Dec 10 12:56:44 crc kubenswrapper[4921]: W1210 12:56:44.385265 4921 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.182:6443: connect: connection refused Dec 10 12:56:44 crc kubenswrapper[4921]: E1210 12:56:44.385402 4921 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.182:6443: connect: connection refused" logger="UnhandledError" Dec 10 12:56:44 crc kubenswrapper[4921]: E1210 12:56:44.516206 4921 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.182:6443: connect: connection refused" interval="1.6s" Dec 10 12:56:44 crc kubenswrapper[4921]: I1210 12:56:44.769801 4921 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 12:56:44 crc kubenswrapper[4921]: I1210 12:56:44.771740 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:56:44 crc kubenswrapper[4921]: 
I1210 12:56:44.771824 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:56:44 crc kubenswrapper[4921]: I1210 12:56:44.771836 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:56:44 crc kubenswrapper[4921]: I1210 12:56:44.771881 4921 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 10 12:56:44 crc kubenswrapper[4921]: E1210 12:56:44.772439 4921 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.182:6443: connect: connection refused" node="crc" Dec 10 12:56:45 crc kubenswrapper[4921]: I1210 12:56:45.227528 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"0c698fe17a324b47c395e8f4e6e693eb79b4265e185c49b1d9fe883cb0c96642"} Dec 10 12:56:45 crc kubenswrapper[4921]: I1210 12:56:45.227731 4921 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 12:56:45 crc kubenswrapper[4921]: I1210 12:56:45.228734 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:56:45 crc kubenswrapper[4921]: I1210 12:56:45.228768 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:56:45 crc kubenswrapper[4921]: I1210 12:56:45.228777 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:56:45 crc kubenswrapper[4921]: I1210 12:56:45.231606 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"1d0195b43f37c1b874a0daf78d2a91ec39fd64c275503d9f4ec64b74a0d8e423"} Dec 10 12:56:45 crc kubenswrapper[4921]: I1210 12:56:45.231643 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"efb9127602fc13526bf57fef51bb7814039a1507cee77693ee2723ffc18620bf"} Dec 10 12:56:45 crc kubenswrapper[4921]: I1210 12:56:45.231653 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"8db8d5b587ec546f8a084fec36cbe7f89aa6998f4bc6dbd1bb9fd22a35f1384e"} Dec 10 12:56:45 crc kubenswrapper[4921]: I1210 12:56:45.231763 4921 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 12:56:45 crc kubenswrapper[4921]: I1210 12:56:45.232798 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:56:45 crc kubenswrapper[4921]: I1210 12:56:45.232834 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:56:45 crc kubenswrapper[4921]: I1210 12:56:45.232846 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:56:45 crc kubenswrapper[4921]: I1210 12:56:45.234472 4921 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 
12:56:45 crc kubenswrapper[4921]: I1210 12:56:45.234457 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"4244835c7f038a7c1bf4820de49854350a23fac13c5a252a1553f6508594f10e"} Dec 10 12:56:45 crc kubenswrapper[4921]: I1210 12:56:45.234515 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"1f66fe2144cde40619405c04d7d83cbcc2e78503401df428502abad1682d4cb7"} Dec 10 12:56:45 crc kubenswrapper[4921]: I1210 12:56:45.234527 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"12292f0529bcf32fb33e5accfbd0dfd7d53e377a9ee2046d4ca6efc78fe1c31a"} Dec 10 12:56:45 crc kubenswrapper[4921]: I1210 12:56:45.235236 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:56:45 crc kubenswrapper[4921]: I1210 12:56:45.235262 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:56:45 crc kubenswrapper[4921]: I1210 12:56:45.235271 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:56:45 crc kubenswrapper[4921]: I1210 12:56:45.237202 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"1e5afbcb1ea81c3f9ec4152ef614a3f07ba1ded75c774c467e968f9c3ee72e33"} Dec 10 12:56:45 crc kubenswrapper[4921]: I1210 12:56:45.237234 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"6eaca0cb438e61f0856ed7dc64256ccd02aee8dac014d1f5e9cd8aa180c736fb"} Dec 10 12:56:45 crc kubenswrapper[4921]: I1210 12:56:45.237248 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"692a4c4828dc74b1bfb948f58fab96ee6674030cb9009c72f30f9eae482eb682"} Dec 10 12:56:45 crc kubenswrapper[4921]: I1210 12:56:45.237279 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"0534394a39803e8a7555e29d0770b5ac7f9197a5f0e03bec4c5460d77fffdd14"} Dec 10 12:56:45 crc kubenswrapper[4921]: I1210 12:56:45.238946 4921 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="0acb3ca5fa3945c89412f466b00193354c94ce56dbba608c104d3baf555a2c3a" exitCode=0 Dec 10 12:56:45 crc kubenswrapper[4921]: I1210 12:56:45.238976 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"0acb3ca5fa3945c89412f466b00193354c94ce56dbba608c104d3baf555a2c3a"} Dec 10 12:56:45 crc kubenswrapper[4921]: I1210 12:56:45.239176 4921 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 12:56:45 crc kubenswrapper[4921]: I1210 12:56:45.239937 4921 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:56:45 crc kubenswrapper[4921]: I1210 12:56:45.239961 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:56:45 crc kubenswrapper[4921]: I1210 12:56:45.239972 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:56:45 crc kubenswrapper[4921]: I1210 12:56:45.272760 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 10 12:56:46 crc kubenswrapper[4921]: I1210 12:56:46.246833 4921 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="b7d1b714acf0f278cc0310204225d417266a241f1ea827dc625f7b89a7d0ebac" exitCode=0 Dec 10 12:56:46 crc kubenswrapper[4921]: I1210 12:56:46.246912 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"b7d1b714acf0f278cc0310204225d417266a241f1ea827dc625f7b89a7d0ebac"} Dec 10 12:56:46 crc kubenswrapper[4921]: I1210 12:56:46.247097 4921 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 12:56:46 crc kubenswrapper[4921]: I1210 12:56:46.248962 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:56:46 crc kubenswrapper[4921]: I1210 12:56:46.249000 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:56:46 crc kubenswrapper[4921]: I1210 12:56:46.249014 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:56:46 crc kubenswrapper[4921]: I1210 12:56:46.252716 4921 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 12:56:46 crc kubenswrapper[4921]: I1210 12:56:46.253246 4921 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 12:56:46 crc kubenswrapper[4921]: I1210 12:56:46.253531 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"3b39874b20cdccc7903753342421a1f7e13b7e99a2cb699a7c0e44226aebd4f4"} Dec 10 12:56:46 crc kubenswrapper[4921]: I1210 12:56:46.253599 4921 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 12:56:46 crc kubenswrapper[4921]: I1210 12:56:46.254105 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:56:46 crc kubenswrapper[4921]: I1210 12:56:46.254138 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:56:46 crc kubenswrapper[4921]: I1210 12:56:46.254152 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:56:46 crc kubenswrapper[4921]: I1210 12:56:46.254310 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:56:46 crc kubenswrapper[4921]: I1210 12:56:46.254333 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:56:46 
crc kubenswrapper[4921]: I1210 12:56:46.254341 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:56:46 crc kubenswrapper[4921]: I1210 12:56:46.254801 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:56:46 crc kubenswrapper[4921]: I1210 12:56:46.254821 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:56:46 crc kubenswrapper[4921]: I1210 12:56:46.254832 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:56:46 crc kubenswrapper[4921]: I1210 12:56:46.372638 4921 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 12:56:46 crc kubenswrapper[4921]: I1210 12:56:46.374179 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:56:46 crc kubenswrapper[4921]: I1210 12:56:46.374220 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:56:46 crc kubenswrapper[4921]: I1210 12:56:46.374233 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:56:46 crc kubenswrapper[4921]: I1210 12:56:46.374262 4921 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 10 12:56:46 crc kubenswrapper[4921]: I1210 12:56:46.462368 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 10 12:56:46 crc kubenswrapper[4921]: I1210 12:56:46.738859 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 10 12:56:46 crc kubenswrapper[4921]: I1210 12:56:46.743223 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 10 12:56:46 crc kubenswrapper[4921]: I1210 12:56:46.796155 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 10 12:56:46 crc kubenswrapper[4921]: I1210 12:56:46.984922 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 10 12:56:47 crc kubenswrapper[4921]: I1210 12:56:47.260568 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"c24d974446ee70bf587bf3969542cda98f062a9cc78b6af73005d9b8d0a6ee02"} Dec 10 12:56:47 crc kubenswrapper[4921]: I1210 12:56:47.260641 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"57991b0cb6fd4b37082ff5d4eecc6227d77f241e9a983cd3e0eb9db5b485865f"} Dec 10 12:56:47 crc kubenswrapper[4921]: I1210 12:56:47.260660 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"0b9a190a657ca03f3fb08626b7af512164ff131b1783b903a02005a111a7036c"} Dec 10 12:56:47 crc kubenswrapper[4921]: I1210 12:56:47.260674 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" 
event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"17a6158acd097054719316d2ad29dc036546d3951bb1e8dd010618f9155270a8"} Dec 10 12:56:47 crc kubenswrapper[4921]: I1210 12:56:47.260686 4921 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 12:56:47 crc kubenswrapper[4921]: I1210 12:56:47.260840 4921 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 12:56:47 crc kubenswrapper[4921]: I1210 12:56:47.261878 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:56:47 crc kubenswrapper[4921]: I1210 12:56:47.261903 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:56:47 crc kubenswrapper[4921]: I1210 12:56:47.261915 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:56:47 crc kubenswrapper[4921]: I1210 12:56:47.262036 4921 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 12:56:47 crc kubenswrapper[4921]: I1210 12:56:47.262654 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:56:47 crc kubenswrapper[4921]: I1210 12:56:47.262698 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:56:47 crc kubenswrapper[4921]: I1210 12:56:47.262714 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:56:47 crc kubenswrapper[4921]: I1210 12:56:47.263154 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:56:47 crc kubenswrapper[4921]: I1210 12:56:47.263175 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:56:47 crc kubenswrapper[4921]: I1210 12:56:47.263186 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:56:48 crc kubenswrapper[4921]: I1210 12:56:48.050806 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 10 12:56:48 crc kubenswrapper[4921]: I1210 12:56:48.267866 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"b5a3f231014293fc0412e577cf9840f62f8db869ea4f0f8bef1bfc5112b38cf4"} Dec 10 12:56:48 crc kubenswrapper[4921]: I1210 12:56:48.268014 4921 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 12:56:48 crc kubenswrapper[4921]: I1210 12:56:48.268039 4921 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 12:56:48 crc kubenswrapper[4921]: I1210 12:56:48.268507 4921 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 12:56:48 crc kubenswrapper[4921]: I1210 12:56:48.269400 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:56:48 crc kubenswrapper[4921]: I1210 12:56:48.269440 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:56:48 crc kubenswrapper[4921]: I1210 
12:56:48.269441 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:56:48 crc kubenswrapper[4921]: I1210 12:56:48.269474 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:56:48 crc kubenswrapper[4921]: I1210 12:56:48.269489 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:56:48 crc kubenswrapper[4921]: I1210 12:56:48.269445 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:56:48 crc kubenswrapper[4921]: I1210 12:56:48.269519 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:56:48 crc kubenswrapper[4921]: I1210 12:56:48.269530 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:56:48 crc kubenswrapper[4921]: I1210 12:56:48.269449 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:56:49 crc kubenswrapper[4921]: I1210 12:56:49.013763 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc" Dec 10 12:56:49 crc kubenswrapper[4921]: I1210 12:56:49.270781 4921 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 12:56:49 crc kubenswrapper[4921]: I1210 12:56:49.270912 4921 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 12:56:49 crc kubenswrapper[4921]: I1210 12:56:49.272532 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:56:49 crc kubenswrapper[4921]: I1210 12:56:49.272604 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:56:49 crc kubenswrapper[4921]: I1210 12:56:49.272630 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:56:49 crc kubenswrapper[4921]: I1210 12:56:49.272920 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:56:49 crc kubenswrapper[4921]: I1210 12:56:49.272976 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:56:49 crc kubenswrapper[4921]: I1210 12:56:49.272994 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:56:49 crc kubenswrapper[4921]: I1210 12:56:49.499579 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 10 12:56:50 crc kubenswrapper[4921]: I1210 12:56:50.115572 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 10 12:56:50 crc kubenswrapper[4921]: I1210 12:56:50.115815 4921 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 12:56:50 crc kubenswrapper[4921]: I1210 12:56:50.117742 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:56:50 crc kubenswrapper[4921]: I1210 12:56:50.117807 4921 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasNoDiskPressure" Dec 10 12:56:50 crc kubenswrapper[4921]: I1210 12:56:50.117824 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:56:50 crc kubenswrapper[4921]: I1210 12:56:50.274874 4921 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 12:56:50 crc kubenswrapper[4921]: I1210 12:56:50.274927 4921 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 12:56:50 crc kubenswrapper[4921]: I1210 12:56:50.280819 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:56:50 crc kubenswrapper[4921]: I1210 12:56:50.280875 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:56:50 crc kubenswrapper[4921]: I1210 12:56:50.280897 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:56:50 crc kubenswrapper[4921]: I1210 12:56:50.280913 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:56:50 crc kubenswrapper[4921]: I1210 12:56:50.280917 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:56:50 crc kubenswrapper[4921]: I1210 12:56:50.280935 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:56:51 crc kubenswrapper[4921]: I1210 12:56:51.540990 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc" Dec 10 12:56:51 crc kubenswrapper[4921]: I1210 12:56:51.541258 4921 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 12:56:51 crc kubenswrapper[4921]: I1210 12:56:51.542805 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:56:51 crc kubenswrapper[4921]: I1210 12:56:51.542926 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:56:51 crc kubenswrapper[4921]: I1210 12:56:51.542971 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:56:53 crc kubenswrapper[4921]: I1210 12:56:53.116376 4921 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 10 12:56:53 crc kubenswrapper[4921]: I1210 12:56:53.116638 4921 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 10 12:56:53 crc kubenswrapper[4921]: E1210 12:56:53.279587 4921 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Dec 10 12:56:55 crc kubenswrapper[4921]: I1210 12:56:55.105555 4921 csi_plugin.go:884] Failed to contact API server when 
waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": net/http: TLS handshake timeout Dec 10 12:56:55 crc kubenswrapper[4921]: W1210 12:56:55.833567 4921 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": net/http: TLS handshake timeout Dec 10 12:56:55 crc kubenswrapper[4921]: I1210 12:56:55.833702 4921 trace.go:236] Trace[1264233944]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (10-Dec-2025 12:56:45.831) (total time: 10002ms): Dec 10 12:56:55 crc kubenswrapper[4921]: Trace[1264233944]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": net/http: TLS handshake timeout 10002ms (12:56:55.833) Dec 10 12:56:55 crc kubenswrapper[4921]: Trace[1264233944]: [10.002137553s] [10.002137553s] END Dec 10 12:56:55 crc kubenswrapper[4921]: E1210 12:56:55.833731 4921 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Dec 10 12:56:56 crc kubenswrapper[4921]: E1210 12:56:56.116995 4921 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": context deadline exceeded" interval="3.2s" Dec 10 12:56:56 crc kubenswrapper[4921]: W1210 12:56:56.221239 4921 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": net/http: TLS handshake timeout Dec 10 12:56:56 crc kubenswrapper[4921]: I1210 12:56:56.221358 4921 trace.go:236] Trace[1086311155]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (10-Dec-2025 12:56:46.219) (total time: 10002ms): Dec 10 12:56:56 crc kubenswrapper[4921]: Trace[1086311155]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": net/http: TLS handshake timeout 10002ms (12:56:56.221) Dec 10 12:56:56 crc kubenswrapper[4921]: Trace[1086311155]: [10.002297048s] [10.002297048s] END Dec 10 12:56:56 crc kubenswrapper[4921]: E1210 12:56:56.221401 4921 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Dec 10 12:56:56 crc kubenswrapper[4921]: I1210 12:56:56.266119 4921 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Dec 10 12:56:56 crc kubenswrapper[4921]: I1210 12:56:56.266210 4921 prober.go:107] "Probe failed" 
probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Dec 10 12:56:56 crc kubenswrapper[4921]: I1210 12:56:56.277318 4921 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Dec 10 12:56:56 crc kubenswrapper[4921]: I1210 12:56:56.277408 4921 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Dec 10 12:56:56 crc kubenswrapper[4921]: I1210 12:56:56.476670 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 10 12:56:56 crc kubenswrapper[4921]: I1210 12:56:56.476853 4921 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 12:56:56 crc kubenswrapper[4921]: I1210 12:56:56.478249 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:56:56 crc kubenswrapper[4921]: I1210 12:56:56.478304 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:56:56 crc kubenswrapper[4921]: I1210 12:56:56.478317 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:56:59 crc kubenswrapper[4921]: I1210 12:56:59.505075 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 10 12:56:59 crc kubenswrapper[4921]: I1210 12:56:59.505288 4921 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 12:56:59 crc kubenswrapper[4921]: I1210 12:56:59.506427 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:56:59 crc kubenswrapper[4921]: I1210 12:56:59.506463 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:56:59 crc kubenswrapper[4921]: I1210 12:56:59.506484 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:56:59 crc kubenswrapper[4921]: I1210 12:56:59.509599 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 10 12:57:00 crc kubenswrapper[4921]: I1210 12:57:00.017725 4921 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Dec 10 12:57:00 crc kubenswrapper[4921]: I1210 12:57:00.305804 4921 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 10 12:57:00 crc kubenswrapper[4921]: I1210 12:57:00.305906 4921 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 12:57:00 crc kubenswrapper[4921]: I1210 12:57:00.307694 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 10 12:57:00 crc kubenswrapper[4921]: I1210 12:57:00.307763 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:00 crc kubenswrapper[4921]: I1210 12:57:00.307782 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:01 crc kubenswrapper[4921]: I1210 12:57:01.266725 4921 trace.go:236] Trace[168664251]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (10-Dec-2025 12:56:46.521) (total time: 14744ms): Dec 10 12:57:01 crc kubenswrapper[4921]: Trace[168664251]: ---"Objects listed" error: 14744ms (12:57:01.266) Dec 10 12:57:01 crc kubenswrapper[4921]: Trace[168664251]: [14.744876757s] [14.744876757s] END Dec 10 12:57:01 crc kubenswrapper[4921]: I1210 12:57:01.266762 4921 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Dec 10 12:57:01 crc kubenswrapper[4921]: I1210 12:57:01.268208 4921 trace.go:236] Trace[1286161742]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (10-Dec-2025 12:56:47.034) (total time: 14233ms): Dec 10 12:57:01 crc kubenswrapper[4921]: Trace[1286161742]: ---"Objects listed" error: 14233ms (12:57:01.268) Dec 10 12:57:01 crc kubenswrapper[4921]: Trace[1286161742]: [14.233992306s] [14.233992306s] END Dec 10 12:57:01 crc kubenswrapper[4921]: I1210 12:57:01.268239 4921 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Dec 10 12:57:01 crc kubenswrapper[4921]: I1210 12:57:01.268860 4921 reconstruct.go:205] "DevicePaths of reconstructed volumes updated" Dec 10 12:57:01 crc kubenswrapper[4921]: E1210 12:57:01.270546 4921 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc" Dec 10 12:57:01 crc kubenswrapper[4921]: I1210 12:57:01.309331 4921 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:60528->192.168.126.11:17697: read: connection reset by peer" start-of-body= Dec 10 12:57:01 crc kubenswrapper[4921]: I1210 12:57:01.309365 4921 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Liveness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:60540->192.168.126.11:17697: read: connection reset by peer" start-of-body= Dec 10 12:57:01 crc kubenswrapper[4921]: I1210 12:57:01.309424 4921 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:60528->192.168.126.11:17697: read: connection reset by peer" Dec 10 12:57:01 crc kubenswrapper[4921]: I1210 12:57:01.309480 4921 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:60540->192.168.126.11:17697: read: connection reset by 
peer" Dec 10 12:57:01 crc kubenswrapper[4921]: I1210 12:57:01.309879 4921 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Dec 10 12:57:01 crc kubenswrapper[4921]: I1210 12:57:01.309940 4921 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Dec 10 12:57:01 crc kubenswrapper[4921]: I1210 12:57:01.577617 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Dec 10 12:57:01 crc kubenswrapper[4921]: I1210 12:57:01.593275 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Dec 10 12:57:01 crc kubenswrapper[4921]: I1210 12:57:01.874515 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 10 12:57:01 crc kubenswrapper[4921]: I1210 12:57:01.879048 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 10 12:57:01 crc kubenswrapper[4921]: I1210 12:57:01.925417 4921 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.107416 4921 apiserver.go:52] "Watching apiserver" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.110529 4921 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.110830 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc","openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf","openshift-etcd/etcd-crc"] Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.111412 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.111473 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 12:57:02 crc kubenswrapper[4921]: E1210 12:57:02.111554 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.111575 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.111740 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.111701 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 10 12:57:02 crc kubenswrapper[4921]: E1210 12:57:02.111682 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.112082 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 12:57:02 crc kubenswrapper[4921]: E1210 12:57:02.112341 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.113667 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.114212 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.114326 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.114524 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.114680 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.114877 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.115154 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.115846 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.116085 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.139122 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to 
patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.163639 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"371fafdc-aa16-4608-aaa2-e419c4ddbc18\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b9a190a657ca03f3fb08626b7af512164ff131b1783b903a02005a111a7036c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57991b0cb6fd4b37082ff5d4eecc6227d77f241e9a983cd3e0eb9db5b485865f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c24d974446ee70bf587bf3969542cda98f062a9cc78b6af73005d9b8d0a6ee02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5a3f231014293fc0412e577cf9840f62f8db86
9ea4f0f8bef1bfc5112b38cf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://17a6158acd097054719316d2ad29dc036546d3951bb1e8dd010618f9155270a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://534968b5f5d9e7b3063c91a3e0b68ba04d83e2cb65ab688b23d284adc6852155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://534968b5f5d9e7b3063c91a3e0b68ba04d83e2cb65ab688b23d284adc6852155\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0acb3ca5fa3945c89412f466b00193354c94ce56dbba608c104d3baf555a2c3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0acb3ca5fa3945c89412f466b00193354c94ce56dbba608c104d3baf555a2c3a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b7d1b714acf0f278cc0310204225d417266a241f1ea827dc625f7b89a7d0ebac\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b7d1b714acf0f278cc0310204225d417266a241f1ea827dc625f7b89a7d0ebac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.174925 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02726135-3050-46a1-a3ab-b2ce46cdb75d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12292f0529bcf32fb33e5accfbd0dfd7d53e377a9ee2046d4ca6efc78fe1c31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a4716beddbcd24e8418830aa5494cffffc21272e45e30bd15cfe58bfc07c543\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335
e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f66fe2144cde40619405c04d7d83cbcc2e78503401df428502abad1682d4cb7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4244835c7f038a7c1bf4820de49854350a23fac13c5a252a1553f6508594f10e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.187652 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.199628 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.208962 4921 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.212841 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.225218 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.241645 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.252991 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.274745 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.274831 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.274860 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.274887 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.274911 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.274937 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.274960 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 
12:57:02.274987 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.275014 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.275034 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.275059 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.275081 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.275104 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.275136 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.275160 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.275187 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.275214 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: 
\"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.275243 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.275270 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.275310 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.275334 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.275366 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.275418 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.275441 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.275796 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.275888 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.275911 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.276016 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.276037 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.276067 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.276190 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.276221 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.276322 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.276364 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.276426 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.276514 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.276580 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.276621 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.276694 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.276691 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.276714 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). 
InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.276814 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.276857 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.277020 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.277077 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.277123 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.277144 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.277129 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.277229 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.277258 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.277282 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.277304 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.277328 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.277353 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.277378 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.277420 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.277442 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod 
\"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.277464 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.277487 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.277509 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.277533 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.277553 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.277573 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.277592 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.277612 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.277631 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.277666 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: 
\"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.277688 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.277710 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.277733 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.277755 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.277777 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.277799 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.277822 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.277941 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.277964 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.277982 4921 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.278003 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.278030 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.278106 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.278137 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.278163 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.278191 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.278214 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.278237 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.278259 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 10 12:57:02 crc 
kubenswrapper[4921]: I1210 12:57:02.278282 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.278310 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.278334 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.278362 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.278409 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.278440 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.278466 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.278489 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.278517 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.278544 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 10 12:57:02 crc 
kubenswrapper[4921]: I1210 12:57:02.278571 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.278594 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.278619 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.278643 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.278665 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.278689 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.278712 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.278741 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.278765 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.278798 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: 
\"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.278824 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.278868 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.278898 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.278922 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.278947 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.278969 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.278992 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.279013 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.279035 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.279057 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod 
\"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.279104 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.279125 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.279143 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.279161 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.279179 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.279197 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.279217 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.279240 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.279260 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.279279 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: 
\"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.279298 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.279315 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.279333 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.279350 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.279368 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.279401 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.279420 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.279442 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.279460 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.279485 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod 
\"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.279503 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.279521 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.279539 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.279559 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.279576 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.279597 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.279616 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.279635 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.279656 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.279673 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod 
\"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.279689 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.279730 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.279750 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.279768 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.279785 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.279806 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.279826 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.279844 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.279863 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.279882 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.279898 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.279918 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.279935 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.279952 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.279971 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.280200 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.280234 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.280260 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.280278 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.280297 4921 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.280314 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.280336 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.280356 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.280373 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.280414 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.280434 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.280452 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.280479 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.277151 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). 
InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.277420 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.290670 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.277417 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.290946 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.277430 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.277457 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.277568 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.277628 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.277714 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.277804 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.277988 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.278059 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.278299 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.278346 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.278593 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.278701 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). 
InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.279061 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.279224 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.279409 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.279759 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.279989 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.280191 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.280397 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.280432 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: E1210 12:57:02.280496 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 12:57:02.780468269 +0000 UTC m=+19.996690193 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.280500 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.280532 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.280890 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.281000 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.281018 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.281143 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.281231 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.281230 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.281333 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.281469 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.281639 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.281674 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.282111 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.282159 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.282311 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.282379 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.282460 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.282479 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.282535 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.282660 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). 
InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.282930 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.283743 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.283977 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.284064 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.284162 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.284177 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.284321 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.284472 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.284577 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.284697 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.284873 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.285021 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.285294 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.285692 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.285700 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.285772 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.285884 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.286005 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.286135 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.286220 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.286222 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.286259 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). 
InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.286493 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.286509 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.286559 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.286680 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.286707 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.286723 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.286731 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.286886 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.286909 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.286976 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.287069 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.287359 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.287864 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.288295 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.288340 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). 
InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.288658 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.288771 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.288773 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.288859 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.289004 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.289212 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.289471 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.289493 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). 
InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.289538 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.289841 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.290044 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.290250 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.290537 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.290524 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.290585 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.291038 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.291359 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.291612 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.291716 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.291865 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.291994 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.292103 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.292287 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.292299 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.292699 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.292987 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.293240 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.293168 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.293421 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.293494 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.293526 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.293557 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.293584 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.293614 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.293642 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.293665 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.293669 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.293721 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.293751 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.293779 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.293808 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.293837 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.293936 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.293997 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.294027 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.294059 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.294349 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.294593 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.294860 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.294887 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.295251 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.295614 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.295740 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.296099 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.296139 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.296356 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.296446 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.296622 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.296653 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.296778 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.296821 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.297066 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.297345 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.297804 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.297933 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.298016 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.298053 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.298074 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.298083 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.298161 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.298190 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.298226 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.298322 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.298460 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.298534 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.298566 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.298594 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.298622 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.298704 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.298708 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.298774 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.298805 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.298833 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.298868 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.298898 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.298924 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.298953 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.299016 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.298980 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.307313 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.307364 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.307402 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.307421 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.307496 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.307521 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.307546 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.307566 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 10 12:57:02 crc kubenswrapper[4921]: 
I1210 12:57:02.307583 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.307621 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.307648 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.307671 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.307689 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.307705 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.307725 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.307787 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.307811 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: 
\"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.307898 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.308038 4921 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.308052 4921 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.308063 4921 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.308073 4921 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.308115 4921 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.308126 4921 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.308136 4921 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.299649 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.308196 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.299791 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.308182 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.308267 4921 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.300978 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.301940 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.302381 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.302786 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.303333 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.305296 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.306849 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.307465 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.307744 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.308177 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.308747 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.309896 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.309926 4921 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. 
Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.314157 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.314642 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: E1210 12:57:02.314708 4921 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 10 12:57:02 crc kubenswrapper[4921]: E1210 12:57:02.317793 4921 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.317902 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 10 12:57:02 crc kubenswrapper[4921]: E1210 12:57:02.318084 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-10 12:57:02.818056485 +0000 UTC m=+20.034278409 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.318424 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.318733 4921 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.319136 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 10 12:57:02 crc kubenswrapper[4921]: E1210 12:57:02.320190 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-10 12:57:02.820172279 +0000 UTC m=+20.036394223 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.320220 4921 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.320239 4921 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.320254 4921 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.320267 4921 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.320280 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.320298 4921 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.320311 4921 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.320324 4921 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.320337 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.320349 4921 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.320361 4921 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.320372 4921 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: 
\"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.320400 4921 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.320412 4921 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.320424 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.320435 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.320446 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.320456 4921 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.320465 4921 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.320473 4921 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.320482 4921 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.320492 4921 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.320501 4921 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.320510 4921 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.320519 4921 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" 
(UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.320529 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.320541 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.320554 4921 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.320565 4921 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.320574 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.320583 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.320593 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.320602 4921 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.320612 4921 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.320622 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.320633 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.320642 4921 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.320652 4921 
reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.320660 4921 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.320669 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.320678 4921 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.320688 4921 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.320698 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.320707 4921 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.320717 4921 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.320725 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.320734 4921 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.320745 4921 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.320753 4921 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.320762 4921 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.320770 4921 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" 
(UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.320779 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.320788 4921 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.320798 4921 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.320807 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.320817 4921 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.320826 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.320834 4921 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.320843 4921 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.320852 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.320861 4921 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.320876 4921 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.320886 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.320895 4921 reconciler_common.go:293] 
"Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.320903 4921 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.320911 4921 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.320919 4921 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.320928 4921 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.320937 4921 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.320946 4921 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.320954 4921 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.320963 4921 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.320972 4921 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.320981 4921 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.320992 4921 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.321002 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.321011 4921 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.321020 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.321029 4921 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.321038 4921 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.321046 4921 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.321055 4921 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.321063 4921 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.321073 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.321082 4921 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.321090 4921 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.321099 4921 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.321109 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.321118 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.321127 4921 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.321136 4921 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.321145 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.321154 4921 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.321163 4921 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.321172 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.321181 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.321190 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.321198 4921 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.321206 4921 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.321214 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.321223 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.321232 4921 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.321240 4921 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.321249 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.321258 4921 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.321266 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.321275 4921 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.321283 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.321293 4921 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.321304 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.321314 4921 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.321323 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.321333 4921 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.321341 4921 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.321349 4921 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.321358 4921 reconciler_common.go:293] "Volume 
detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.321396 4921 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.321405 4921 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.321414 4921 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.321423 4921 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.321432 4921 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.321441 4921 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.321450 4921 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.321459 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.321468 4921 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.321477 4921 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.321487 4921 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.321502 4921 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.321511 4921 reconciler_common.go:293] "Volume detached for 
volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.321520 4921 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.321529 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.321537 4921 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.321546 4921 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.321555 4921 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.321564 4921 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.321573 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.321582 4921 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.321591 4921 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.321603 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.321611 4921 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.321620 4921 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.321630 4921 
reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.321639 4921 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.321649 4921 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.321904 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.322548 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.323876 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.327312 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.327879 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.328076 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.328366 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.333171 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.335038 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.335064 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.335193 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.335228 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.335653 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.335807 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.337461 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.340083 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.342160 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.352353 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.355560 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.355830 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.356718 4921 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="3b39874b20cdccc7903753342421a1f7e13b7e99a2cb699a7c0e44226aebd4f4" exitCode=255 Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.357442 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"3b39874b20cdccc7903753342421a1f7e13b7e99a2cb699a7c0e44226aebd4f4"} Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.358377 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.358535 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: E1210 12:57:02.360624 4921 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 10 12:57:02 crc kubenswrapper[4921]: E1210 12:57:02.360653 4921 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 10 12:57:02 crc kubenswrapper[4921]: E1210 12:57:02.360669 4921 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 10 12:57:02 crc kubenswrapper[4921]: E1210 12:57:02.360723 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-10 12:57:02.860705311 +0000 UTC m=+20.076927235 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.361900 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.374045 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 10 12:57:02 crc kubenswrapper[4921]: E1210 12:57:02.380985 4921 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 10 12:57:02 crc kubenswrapper[4921]: E1210 12:57:02.381031 4921 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 10 12:57:02 crc kubenswrapper[4921]: E1210 12:57:02.381046 4921 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 10 12:57:02 crc kubenswrapper[4921]: E1210 12:57:02.381124 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-10 12:57:02.88109738 +0000 UTC m=+20.097319304 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.392848 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.395204 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.395615 4921 scope.go:117] "RemoveContainer" containerID="3b39874b20cdccc7903753342421a1f7e13b7e99a2cb699a7c0e44226aebd4f4" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.402832 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: E1210 12:57:02.405820 4921 kubelet.go:1929] "Failed creating a mirror pod for" err="pods \"kube-controller-manager-crc\" already exists" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 10 12:57:02 crc kubenswrapper[4921]: E1210 12:57:02.418776 4921 kubelet.go:1929] "Failed creating a mirror pod for" err="pods \"etcd-crc\" already exists" pod="openshift-etcd/etcd-crc" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.422896 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.422994 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.426581 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02726135-3050-46a1-a3ab-b2ce46cdb75d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12292f0529bcf32fb33e5accfbd0dfd7d53e377a9ee2046d4ca6efc78fe1c31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a4716beddbcd24e8418830aa5494cffffc21272e45e30bd15cfe58bfc07c543\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243
b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f66fe2144cde40619405c04d7d83cbcc2e78503401df428502abad1682d4cb7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4244835c7f038a7c1bf4820de49854350a23fac13c5a252a1553f6508594f10e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.433369 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.433869 4921 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.434032 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.434079 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.434212 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.434613 4921 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.434851 4921 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.434874 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.434883 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.434893 4921 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.434904 4921 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.434933 4921 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.434944 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.434953 4921 
reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.434965 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.434975 4921 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.434985 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.434996 4921 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.435008 4921 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.435017 4921 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.435027 4921 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.435039 4921 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.435049 4921 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.435058 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.435067 4921 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.435278 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.435299 4921 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" 
(UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.435308 4921 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.435321 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.435331 4921 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.435341 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.435351 4921 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.435365 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.435373 4921 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.435398 4921 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.435410 4921 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.435579 4921 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.435592 4921 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.435601 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.435611 4921 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.435647 4921 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.435721 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.446017 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.446754 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.451619 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.460307 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.470559 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.488979 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"371fafdc-aa16-4608-aaa2-e419c4ddbc18\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b9a190a657ca03f3fb08626b7af512164ff131b1783b903a02005a111a7036c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57991b0cb6fd4b37082ff5d4eecc6227d77f241e9a983cd3e0eb9db5b485865f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"runn
ing\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c24d974446ee70bf587bf3969542cda98f062a9cc78b6af73005d9b8d0a6ee02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5a3f231014293fc0412e577cf9840f62f8db869ea4f0f8bef1bfc5112b38cf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://17a6158acd097054719316d2ad29dc036546d3951bb1e8dd010618f9155270a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://534968b5f5d9e7b3063c91a3e0b68ba04d83e2cb65ab688b23d284adc6852155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://534968b5f5d9e7b3063c91a3e
0b68ba04d83e2cb65ab688b23d284adc6852155\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0acb3ca5fa3945c89412f466b00193354c94ce56dbba608c104d3baf555a2c3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0acb3ca5fa3945c89412f466b00193354c94ce56dbba608c104d3baf555a2c3a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b7d1b714acf0f278cc0310204225d417266a241f1ea827dc625f7b89a7d0ebac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b7d1b714acf0f278cc0310204225d417266a241f1ea827dc625f7b89a7d0ebac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.503823 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with 
unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.517985 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.538628 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.538964 4921 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.538989 4921 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.539004 4921 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.731581 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h"
Dec 10 12:57:02 crc kubenswrapper[4921]: W1210 12:57:02.743900 4921 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd75a4c96_2883_4a0b_bab2_0fab2b6c0b49.slice/crio-7111b6ba41f514220f2254c4cded46444df9dbf3ecaeed3481ca56525af875bc WatchSource:0}: Error finding container 7111b6ba41f514220f2254c4cded46444df9dbf3ecaeed3481ca56525af875bc: Status 404 returned error can't find the container with id 7111b6ba41f514220f2254c4cded46444df9dbf3ecaeed3481ca56525af875bc
Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.840264 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.840348 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.840379 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 10 12:57:02 crc kubenswrapper[4921]: E1210 12:57:02.840503 4921 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
Dec 10 12:57:02 crc kubenswrapper[4921]: E1210 12:57:02.840560 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 12:57:03.840530022 +0000 UTC m=+21.056751986 (durationBeforeRetry 1s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 12:57:02 crc kubenswrapper[4921]: E1210 12:57:02.840595 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-10 12:57:03.840582473 +0000 UTC m=+21.056804437 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Dec 10 12:57:02 crc kubenswrapper[4921]: E1210 12:57:02.840673 4921 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered
Dec 10 12:57:02 crc kubenswrapper[4921]: E1210 12:57:02.840876 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-10 12:57:03.840819829 +0000 UTC m=+21.057041913 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.941528 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 10 12:57:02 crc kubenswrapper[4921]: I1210 12:57:02.941594 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 10 12:57:02 crc kubenswrapper[4921]: E1210 12:57:02.941734 4921 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Dec 10 12:57:02 crc kubenswrapper[4921]: E1210 12:57:02.941738 4921 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Dec 10 12:57:02 crc kubenswrapper[4921]: E1210 12:57:02.941783 4921 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Dec 10 12:57:02 crc kubenswrapper[4921]: E1210 12:57:02.941752 4921 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Dec 10 12:57:02 crc kubenswrapper[4921]: E1210 12:57:02.941798 4921 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 10 12:57:02 crc kubenswrapper[4921]: E1210 12:57:02.941806 4921
projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 10 12:57:02 crc kubenswrapper[4921]: E1210 12:57:02.941872 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-10 12:57:03.94185008 +0000 UTC m=+21.158072004 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 10 12:57:02 crc kubenswrapper[4921]: E1210 12:57:02.941892 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-10 12:57:03.941885811 +0000 UTC m=+21.158107735 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 10 12:57:03 crc kubenswrapper[4921]: I1210 12:57:03.196416 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Dec 10 12:57:03 crc kubenswrapper[4921]: I1210 12:57:03.197165 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Dec 10 12:57:03 crc kubenswrapper[4921]: I1210 12:57:03.209372 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f57208b0-80bc-4c1b-bbab-9d2f858972f6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0534394a39803e8a7555e29d0770b5ac7f9197a5f0e03bec4c5460d77fffdd14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6eaca0cb438e61f0856ed7dc64256ccd02aee8dac014d1f5e9cd8aa180c736fb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://692a4c4828dc74b1bfb948f58fab96ee6674030cb9009c72f30f9eae482eb682\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b39874b20cdccc7903753342421a1f7e13b7e99a2cb699a7c0e44226aebd4f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b39874b20cdccc7903753342421a1f7e13b7e99a2cb699a7c0e44226aebd4f4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-10
T12:57:01Z\\\",\\\"message\\\":\\\"et denominator for mutating requests\\\\\\\" limit=200\\\\nI1210 12:57:01.294872 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1210 12:57:01.294893 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1210 12:57:01.294918 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 12:57:01.294926 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 12:57:01.294932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1210 12:57:01.294934 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1210 12:57:01.294938 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1210 12:57:01.294941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1210 12:57:01.301734 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2446357718/tls.crt::/tmp/serving-cert-2446357718/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1765371405\\\\\\\\\\\\\\\" (2025-12-10 12:56:44 +0000 UTC to 2026-01-09 12:56:45 +0000 UTC (now=2025-12-10 12:57:01.30169166 +0000 UTC))\\\\\\\"\\\\nI1210 12:57:01.301889 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1765371416\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1765371416\\\\\\\\\\\\\\\" (2025-12-10 11:56:55 +0000 UTC to 2026-12-10 11:56:55 +0000 UTC (now=2025-12-10 12:57:01.301865574 +0000 UTC))\\\\\\\"\\\\nI1210 12:57:01.301907 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1210 12:57:01.301934 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nF1210 12:57:01.302850 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e5afbcb1ea81c3f9ec4152ef614a3f07ba1ded75c774c467e968f9c3ee72e33\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bee74fc4c681cc10c5a460c807659272e393e19173109e82ef65371c5b363ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bee74fc4c681cc10c5a460c807659272e393e19173109e82ef65371c5b363ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:03Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:03 crc kubenswrapper[4921]: I1210 12:57:03.222222 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Dec 10 12:57:03 crc kubenswrapper[4921]: I1210 12:57:03.223199 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Dec 10 12:57:03 crc kubenswrapper[4921]: I1210 12:57:03.223526 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:03Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:03 crc kubenswrapper[4921]: I1210 12:57:03.224371 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Dec 10 12:57:03 crc kubenswrapper[4921]: I1210 12:57:03.224950 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Dec 10 12:57:03 crc kubenswrapper[4921]: I1210 12:57:03.225996 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Dec 10 12:57:03 crc kubenswrapper[4921]: I1210 12:57:03.226616 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Dec 10 12:57:03 crc kubenswrapper[4921]: I1210 12:57:03.227641 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Dec 10 12:57:03 crc kubenswrapper[4921]: I1210 12:57:03.228241 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Dec 10 12:57:03 crc kubenswrapper[4921]: I1210 12:57:03.228794 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Dec 10 12:57:03 crc kubenswrapper[4921]: I1210 12:57:03.229845 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Dec 10 12:57:03 crc kubenswrapper[4921]: I1210 12:57:03.230305 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Dec 10 12:57:03 crc kubenswrapper[4921]: I1210 12:57:03.231197 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Dec 10 12:57:03 crc kubenswrapper[4921]: I1210 12:57:03.231692 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Dec 10 12:57:03 crc kubenswrapper[4921]: I1210 12:57:03.232675 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Dec 10 12:57:03 crc kubenswrapper[4921]: I1210 12:57:03.233227 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Dec 10 12:57:03 crc kubenswrapper[4921]: I1210 12:57:03.233610 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Dec 10 12:57:03 crc kubenswrapper[4921]: I1210 12:57:03.234624 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Dec 10 12:57:03 crc kubenswrapper[4921]: I1210 12:57:03.235194 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Dec 10 12:57:03 crc kubenswrapper[4921]: I1210 12:57:03.235364 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with 
unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:03Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:03 crc kubenswrapper[4921]: I1210 12:57:03.235736 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Dec 10 12:57:03 crc kubenswrapper[4921]: I1210 12:57:03.236803 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Dec 10 12:57:03 crc kubenswrapper[4921]: I1210 12:57:03.237242 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Dec 10 12:57:03 crc kubenswrapper[4921]: I1210 12:57:03.238339 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Dec 10 12:57:03 crc kubenswrapper[4921]: I1210 12:57:03.238760 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Dec 10 12:57:03 crc kubenswrapper[4921]: I1210 12:57:03.239942 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Dec 10 12:57:03 crc kubenswrapper[4921]: I1210 12:57:03.240798 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Dec 10 12:57:03 crc kubenswrapper[4921]: I1210 12:57:03.241757 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Dec 10 12:57:03 crc kubenswrapper[4921]: I1210 12:57:03.242296 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Dec 10 12:57:03 crc kubenswrapper[4921]: I1210 12:57:03.243306 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Dec 10 12:57:03 crc kubenswrapper[4921]: I1210 12:57:03.243840 4921 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Dec 10 12:57:03 crc kubenswrapper[4921]: I1210 12:57:03.243964 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Dec 10 12:57:03 crc kubenswrapper[4921]: I1210 12:57:03.246064 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Dec 10 12:57:03 crc kubenswrapper[4921]: I1210 12:57:03.246599 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Dec 10 12:57:03 crc kubenswrapper[4921]: I1210 12:57:03.246920 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:03Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:03 crc kubenswrapper[4921]: I1210 12:57:03.247014 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Dec 10 12:57:03 crc kubenswrapper[4921]: I1210 12:57:03.248569 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Dec 10 12:57:03 crc kubenswrapper[4921]: I1210 12:57:03.249510 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Dec 10 12:57:03 crc kubenswrapper[4921]: I1210 12:57:03.250441 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Dec 10 12:57:03 crc kubenswrapper[4921]: I1210 12:57:03.251104 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Dec 10 12:57:03 crc kubenswrapper[4921]: I1210 12:57:03.252412 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Dec 10 12:57:03 crc kubenswrapper[4921]: I1210 12:57:03.252973 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Dec 10 12:57:03 crc kubenswrapper[4921]: I1210 12:57:03.253715 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Dec 10 12:57:03 crc kubenswrapper[4921]: I1210 12:57:03.254839 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Dec 10 12:57:03 crc kubenswrapper[4921]: I1210 12:57:03.255986 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" 
path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Dec 10 12:57:03 crc kubenswrapper[4921]: I1210 12:57:03.256750 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Dec 10 12:57:03 crc kubenswrapper[4921]: I1210 12:57:03.257804 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Dec 10 12:57:03 crc kubenswrapper[4921]: I1210 12:57:03.258352 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Dec 10 12:57:03 crc kubenswrapper[4921]: I1210 12:57:03.259597 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Dec 10 12:57:03 crc kubenswrapper[4921]: I1210 12:57:03.260665 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Dec 10 12:57:03 crc kubenswrapper[4921]: I1210 12:57:03.261854 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Dec 10 12:57:03 crc kubenswrapper[4921]: I1210 12:57:03.262416 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Dec 10 12:57:03 crc kubenswrapper[4921]: I1210 12:57:03.263147 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Dec 10 12:57:03 crc kubenswrapper[4921]: I1210 12:57:03.264553 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Dec 10 12:57:03 crc kubenswrapper[4921]: I1210 12:57:03.265125 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Dec 10 12:57:03 crc kubenswrapper[4921]: I1210 12:57:03.269093 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:03Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:03 crc kubenswrapper[4921]: I1210 12:57:03.297603 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"371fafdc-aa16-4608-aaa2-e419c4ddbc18\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b9a190a657ca03f3fb08626b7af512164ff131b1783b903a02005a111a7036c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPat
h\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57991b0cb6fd4b37082ff5d4eecc6227d77f241e9a983cd3e0eb9db5b485865f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c24d974446ee70bf587bf3969542cda98f062a9cc78b6af73005d9b8d0a6ee02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5a3f231014293fc0412e577cf9840f62f8db869ea4f0f8bef1bfc5112b38cf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://17a6158acd097054719316d2ad29dc036546d3951bb1e8dd010618f9155270a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{
\\\"containerID\\\":\\\"cri-o://534968b5f5d9e7b3063c91a3e0b68ba04d83e2cb65ab688b23d284adc6852155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://534968b5f5d9e7b3063c91a3e0b68ba04d83e2cb65ab688b23d284adc6852155\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0acb3ca5fa3945c89412f466b00193354c94ce56dbba608c104d3baf555a2c3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0acb3ca5fa3945c89412f466b00193354c94ce56dbba608c104d3baf555a2c3a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b7d1b714acf0f278cc0310204225d417266a241f1ea827dc625f7b89a7d0ebac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b7d1b714acf0f278cc0310204225d417266a241f1ea827dc625f7b89a7d0ebac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:03Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:03 crc kubenswrapper[4921]: I1210 12:57:03.313649 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"02726135-3050-46a1-a3ab-b2ce46cdb75d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12292f0529bcf32fb33e5accfbd0dfd7d53e377a9ee2046d4ca6efc78fe1c31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a4716beddbcd24e8418830aa5494cffffc21272e45e30bd15cfe58bfc07c543\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f66fe2144cde40619405c04d7d83cbcc2e78503401df428502abad1682d4cb7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4244835c7f038a7c1bf4820de49854350a23fac13c5a252a1553f6508594f10e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:03Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:03 crc kubenswrapper[4921]: I1210 12:57:03.332063 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:03Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:03 crc kubenswrapper[4921]: I1210 12:57:03.353498 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:03Z is after 2025-08-24T17:21:41Z"
Dec 10 12:57:03 crc kubenswrapper[4921]: I1210 12:57:03.361596 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log"
Dec 10 12:57:03 crc kubenswrapper[4921]: I1210 12:57:03.363756 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"f534d6390920d177e185001b28f7ece42d82a0da922b4aaf174c271dbe975c50"}
Dec 10 12:57:03 crc kubenswrapper[4921]: I1210 12:57:03.364329 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 10 12:57:03 crc kubenswrapper[4921]: I1210 12:57:03.364812 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"7111b6ba41f514220f2254c4cded46444df9dbf3ecaeed3481ca56525af875bc"}
Dec 10 12:57:03 crc kubenswrapper[4921]: I1210 12:57:03.366432 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"d2789e9cd1bca4abecf0939aad4a5f63bdc250a525ad3664bc2440e8b0b7a834"}
Dec 10 12:57:03 crc kubenswrapper[4921]: I1210 12:57:03.366467 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"4fe477ad1af71c1b4150efadca1cf7493d9873fbc267e495c9796ac94c09ae51"}
Dec 10 12:57:03 crc kubenswrapper[4921]: I1210 12:57:03.368635 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"307b845aae3352df08e2f9fd394f4110a37b2a21650593ebb584c5bf37d01397"}
Dec 10 12:57:03 crc kubenswrapper[4921]: I1210 12:57:03.368678 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"c3be8a498516e12174c8b5612669fd69deef610c01ed9884a5228cd436bbae3b"}
Dec 10 12:57:03 crc kubenswrapper[4921]: I1210 12:57:03.368688 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"dc0683b2e41dea2a4994adcba48b42a2e1121b5a072a21c2fe5c296616310f97"}
Dec 10 12:57:03 crc kubenswrapper[4921]: I1210 12:57:03.392468 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:03Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:03 crc kubenswrapper[4921]: I1210 12:57:03.415944 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f57208b0-80bc-4c1b-bbab-9d2f858972f6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0534394a39803e8a7555e29d0770b5ac7f9197a5f0e03bec4c5460d77fffdd14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6eaca0cb438e61f0856ed7dc64256ccd02aee8dac014d1f5e9cd8aa180c736fb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://692a4c4828dc74b1bfb948f58fab96ee6674030cb9009c72f30f9eae482eb682\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f534d6390920d177e185001b28f7ece42d82a0da922b4aaf174c271dbe975c50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b39874b20cdccc7903753342421a1f7e13b7e99a2cb699a7c0e44226aebd4f4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"message\\\":\\\"et denominator for mutating requests\\\\\\\" limit=200\\\\nI1210 12:57:01.294872 1 
secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1210 12:57:01.294893 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1210 12:57:01.294918 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 12:57:01.294926 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 12:57:01.294932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1210 12:57:01.294934 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1210 12:57:01.294938 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1210 12:57:01.294941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1210 12:57:01.301734 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2446357718/tls.crt::/tmp/serving-cert-2446357718/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1765371405\\\\\\\\\\\\\\\" (2025-12-10 12:56:44 +0000 UTC to 2026-01-09 12:56:45 +0000 UTC (now=2025-12-10 12:57:01.30169166 +0000 UTC))\\\\\\\"\\\\nI1210 12:57:01.301889 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1765371416\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1765371416\\\\\\\\\\\\\\\" (2025-12-10 11:56:55 +0000 UTC to 2026-12-10 11:56:55 +0000 UTC (now=2025-12-10 12:57:01.301865574 +0000 UTC))\\\\\\\"\\\\nI1210 12:57:01.301907 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1210 12:57:01.301934 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nF1210 12:57:01.302850 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e5afbcb1ea81c3f9ec4152ef614a3f07ba1ded75c774c467e968f9c3ee72e33\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bee74fc4c681cc10c5a460c807659272e393e19173109e82ef65371c5b363ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bee74fc4c681cc10c5a460c807659272e393e19173109e82ef65371c5b363ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:03Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:03 crc kubenswrapper[4921]: I1210 12:57:03.443475 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:03Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:03 crc kubenswrapper[4921]: I1210 12:57:03.461118 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:03Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:03 crc kubenswrapper[4921]: I1210 12:57:03.477783 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:03Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:03 crc kubenswrapper[4921]: I1210 12:57:03.492504 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:03Z is after 2025-08-24T17:21:41Z"
Dec 10 12:57:03 crc kubenswrapper[4921]: I1210 12:57:03.512651 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"371fafdc-aa16-4608-aaa2-e419c4ddbc18\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b9a190a657ca03f3fb08626b7af512164ff131b1783b903a02005a111a7036c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57991b0cb6fd4b37082ff5d4eecc6227d77f241e9a983cd3e0eb9db5b485865f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c24d974446ee70bf587bf3969542cda98f062a9cc78b6af73005d9b8d0a6ee02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5a3f231014293fc0412e577cf9840f62f8db869ea4f0f8bef1bfc5112b38cf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://17a6158acd097054719316d2ad29dc036546d3951bb1e8dd010618f9155270a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://534968b5f5d9e7b3063c91a3e0b68ba04d83e2cb65ab688b23d284adc6852155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://534968b5f5d9e7b3063c91a3e0b68ba04d83e2cb65ab688b23d284adc6852155\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0acb3ca5fa3945c89412f466b00193354c94ce56dbba608c104d3baf555a2c3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0acb3ca5fa3945c89412f466b00193354c94ce56dbba608c104d3baf555a2c3a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b7d1b714acf0f278cc0310204225d417266a241f1ea827dc625f7b89a7d0ebac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b7d1b714acf0f278cc0310204225d417266a241f1ea827dc625f7b89a7d0ebac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:03Z is after 2025-08-24T17:21:41Z"
Dec 10 12:57:03 crc kubenswrapper[4921]: I1210 12:57:03.527816 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02726135-3050-46a1-a3ab-b2ce46cdb75d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12292f0529bcf32fb33e5accfbd0dfd7d53e377a9ee2046d4ca6efc78fe1c31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a4716beddbcd24e8418830aa5494cffffc21272e45e30bd15cfe58bfc07c543\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f66fe2144cde40619405c04d7d83cbcc2e78503401df428502abad1682d4cb7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4244835c7f038a7c1bf4820de49854350a23fac13c5a252a1553f6508594f10e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:03Z is after 2025-08-24T17:21:41Z"
Dec 10 12:57:03 crc kubenswrapper[4921]: I1210 12:57:03.541079 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:03Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:03 crc kubenswrapper[4921]: I1210 12:57:03.555478 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f57208b0-80bc-4c1b-bbab-9d2f858972f6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0534394a39803e8a7555e29d0770b5ac7f9197a5f0e03bec4c5460d77fffdd14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6eaca0cb438e61f0856ed7dc64256ccd02aee8dac014d1f5e9cd8aa180c736fb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://692a4c4828dc74b1bfb948f58fab96ee6674030cb9009c72f30f9eae482eb682\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f534d6390920d177e185001b28f7ece42d82a0da922b4aaf174c271dbe975c50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b39874b20cdccc7903753342421a1f7e13b7e99a2cb699a7c0e44226aebd4f4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"message\\\":\\\"et denominator for mutating requests\\\\\\\" limit=200\\\\nI1210 12:57:01.294872 1 
secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1210 12:57:01.294893 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1210 12:57:01.294918 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 12:57:01.294926 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 12:57:01.294932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1210 12:57:01.294934 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1210 12:57:01.294938 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1210 12:57:01.294941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1210 12:57:01.301734 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2446357718/tls.crt::/tmp/serving-cert-2446357718/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1765371405\\\\\\\\\\\\\\\" (2025-12-10 12:56:44 +0000 UTC to 2026-01-09 12:56:45 +0000 UTC (now=2025-12-10 12:57:01.30169166 +0000 UTC))\\\\\\\"\\\\nI1210 12:57:01.301889 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1765371416\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1765371416\\\\\\\\\\\\\\\" (2025-12-10 11:56:55 +0000 UTC to 2026-12-10 11:56:55 +0000 UTC (now=2025-12-10 12:57:01.301865574 +0000 UTC))\\\\\\\"\\\\nI1210 12:57:01.301907 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1210 12:57:01.301934 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nF1210 12:57:01.302850 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e5afbcb1ea81c3f9ec4152ef614a3f07ba1ded75c774c467e968f9c3ee72e33\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bee74fc4c681cc10c5a460c807659272e393e19173109e82ef65371c5b363ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bee74fc4c681cc10c5a460c807659272e393e19173109e82ef65371c5b363ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:03Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:03 crc kubenswrapper[4921]: I1210 12:57:03.573464 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:03Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:03 crc kubenswrapper[4921]: I1210 12:57:03.599770 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2789e9cd1bca4abecf0939aad4a5f63bdc250a525ad3664bc2440e8b0b7a834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:03Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:03 crc kubenswrapper[4921]: I1210 12:57:03.612925 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:03Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:03 crc kubenswrapper[4921]: I1210 12:57:03.635296 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"371fafdc-aa16-4608-aaa2-e419c4ddbc18\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b9a190a657ca03f3fb08626b7af512164ff131b1783b903a02005a111a7036c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57991b0cb6fd4b37082ff5d4eecc6227d77f241e9a983cd3e0eb9db5b485865f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\
",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c24d974446ee70bf587bf3969542cda98f062a9cc78b6af73005d9b8d0a6ee02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5a3f231014293fc0412e577cf9840f62f8db869ea4f0f8bef1bfc5112b38cf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://17a6158acd097054719316d2ad29dc036546d3951bb1e8dd010618f9155270a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://534968b5f5d9e7b3063c91a3e0b68ba04d83e2cb65ab688b23d284adc6852155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"
state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://534968b5f5d9e7b3063c91a3e0b68ba04d83e2cb65ab688b23d284adc6852155\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0acb3ca5fa3945c89412f466b00193354c94ce56dbba608c104d3baf555a2c3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0acb3ca5fa3945c89412f466b00193354c94ce56dbba608c104d3baf555a2c3a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b7d1b714acf0f278cc0310204225d417266a241f1ea827dc625f7b89a7d0ebac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b7d1b714acf0f278cc0310204225d417266a241f1ea827dc625f7b89a7d0ebac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:03Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:03 crc kubenswrapper[4921]: I1210 12:57:03.651818 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"02726135-3050-46a1-a3ab-b2ce46cdb75d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12292f0529bcf32fb33e5accfbd0dfd7d53e377a9ee2046d4ca6efc78fe1c31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a4716beddbcd24e8418830aa5494cffffc21272e45e30bd15cfe58bfc07c543\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f66fe2144cde40619405c04d7d83cbcc2e78503401df428502abad1682d4cb7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4244835c7f038a7c1bf4820de49854350a23fac13c5a252a1553f6508594f10e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:03Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:03 crc kubenswrapper[4921]: I1210 12:57:03.673816 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:03Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:03 crc kubenswrapper[4921]: I1210 12:57:03.692435 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://307b845aae3352df08e2f9fd394f4110a37b2a21650593ebb584c5bf37d01397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3be8a498516e12174c8b5612669fd69deef610c01ed9884a5228cd436bbae3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:03Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:03 crc kubenswrapper[4921]: I1210 12:57:03.706955 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:03Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:03 crc kubenswrapper[4921]: I1210 12:57:03.847632 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 12:57:03 crc kubenswrapper[4921]: I1210 12:57:03.847786 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 12:57:03 crc kubenswrapper[4921]: I1210 12:57:03.847826 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 12:57:03 crc kubenswrapper[4921]: E1210 12:57:03.847917 4921 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 10 12:57:03 crc kubenswrapper[4921]: E1210 12:57:03.847980 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-10 12:57:05.84796149 +0000 UTC m=+23.064183414 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 10 12:57:03 crc kubenswrapper[4921]: E1210 12:57:03.848358 4921 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 10 12:57:03 crc kubenswrapper[4921]: E1210 12:57:03.848422 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 12:57:05.848366381 +0000 UTC m=+23.064588305 (durationBeforeRetry 2s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:57:03 crc kubenswrapper[4921]: E1210 12:57:03.848700 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-10 12:57:05.848669939 +0000 UTC m=+23.064891893 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 10 12:57:03 crc kubenswrapper[4921]: I1210 12:57:03.948351 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 12:57:03 crc kubenswrapper[4921]: I1210 12:57:03.948910 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 12:57:03 crc kubenswrapper[4921]: E1210 12:57:03.948861 4921 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 10 12:57:03 crc kubenswrapper[4921]: E1210 12:57:03.949164 4921 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 10 12:57:03 crc kubenswrapper[4921]: E1210 12:57:03.949258 4921 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 10 12:57:03 crc kubenswrapper[4921]: E1210 12:57:03.949399 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-10 12:57:05.949368301 +0000 UTC m=+23.165590225 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 10 12:57:03 crc kubenswrapper[4921]: E1210 12:57:03.949048 4921 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 10 12:57:03 crc kubenswrapper[4921]: E1210 12:57:03.949947 4921 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 10 12:57:03 crc kubenswrapper[4921]: E1210 12:57:03.950019 4921 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 10 12:57:03 crc kubenswrapper[4921]: E1210 12:57:03.950110 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-10 12:57:05.9501018 +0000 UTC m=+23.166323724 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 10 12:57:04 crc kubenswrapper[4921]: I1210 12:57:04.192309 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 12:57:04 crc kubenswrapper[4921]: E1210 12:57:04.192467 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 12:57:04 crc kubenswrapper[4921]: I1210 12:57:04.192599 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 12:57:04 crc kubenswrapper[4921]: E1210 12:57:04.192757 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 12:57:04 crc kubenswrapper[4921]: I1210 12:57:04.193016 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 12:57:04 crc kubenswrapper[4921]: E1210 12:57:04.193258 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 12:57:04 crc kubenswrapper[4921]: I1210 12:57:04.471436 4921 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 12:57:04 crc kubenswrapper[4921]: I1210 12:57:04.473002 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:04 crc kubenswrapper[4921]: I1210 12:57:04.473036 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:04 crc kubenswrapper[4921]: I1210 12:57:04.473044 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:04 crc kubenswrapper[4921]: I1210 12:57:04.473104 4921 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 10 12:57:04 crc kubenswrapper[4921]: I1210 12:57:04.490382 4921 kubelet_node_status.go:115] "Node was previously registered" node="crc" Dec 10 12:57:04 crc kubenswrapper[4921]: I1210 12:57:04.490821 4921 kubelet_node_status.go:79] "Successfully registered node" node="crc" Dec 10 12:57:04 crc kubenswrapper[4921]: I1210 12:57:04.492274 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:04 crc kubenswrapper[4921]: I1210 12:57:04.492332 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:04 crc kubenswrapper[4921]: I1210 12:57:04.492352 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:04 crc kubenswrapper[4921]: I1210 12:57:04.492378 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:04 crc kubenswrapper[4921]: I1210 12:57:04.492423 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:04Z","lastTransitionTime":"2025-12-10T12:57:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:04 crc kubenswrapper[4921]: E1210 12:57:04.512059 4921 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:04Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:04Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"aa6d129a-c0be-471d-913f-2184d68fb040\\\",\\\"systemUUID\\\":\\\"539c9d38-f260-4af7-b6c3-f4170bf93c3e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:04Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:04 crc kubenswrapper[4921]: I1210 12:57:04.516185 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:04 crc kubenswrapper[4921]: I1210 12:57:04.516228 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 10 12:57:04 crc kubenswrapper[4921]: I1210 12:57:04.516241 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:04 crc kubenswrapper[4921]: I1210 12:57:04.516264 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:04 crc kubenswrapper[4921]: I1210 12:57:04.516280 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:04Z","lastTransitionTime":"2025-12-10T12:57:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:04 crc kubenswrapper[4921]: E1210 12:57:04.530266 4921 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:04Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:04Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"aa6d129a-c0be-471d-913f-2184d68fb040\\\",\\\"systemUUID\\\":\\\"539c9d38-f260-4af7-b6c3-f4170bf93c3e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:04Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:04 crc kubenswrapper[4921]: I1210 12:57:04.534643 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:04 crc kubenswrapper[4921]: I1210 12:57:04.534685 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 10 12:57:04 crc kubenswrapper[4921]: I1210 12:57:04.534696 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:04 crc kubenswrapper[4921]: I1210 12:57:04.534713 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:04 crc kubenswrapper[4921]: I1210 12:57:04.534729 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:04Z","lastTransitionTime":"2025-12-10T12:57:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:04 crc kubenswrapper[4921]: E1210 12:57:04.547368 4921 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:04Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:04Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"aa6d129a-c0be-471d-913f-2184d68fb040\\\",\\\"systemUUID\\\":\\\"539c9d38-f260-4af7-b6c3-f4170bf93c3e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:04Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:04 crc kubenswrapper[4921]: I1210 12:57:04.551449 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:04 crc kubenswrapper[4921]: I1210 12:57:04.551499 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
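Every status-patch retry above fails for the same reason: the kubelet's PATCH to the API server is routed through the node.network-node-identity.openshift.io admission webhook at https://127.0.0.1:9743, and that webhook's serving certificate expired on 2025-08-24T17:21:41Z while the node clock reads 2025-12-10. The verdict can be reproduced outside the kubelet; the following Go sketch (not part of this log, written for illustration) dials the endpoint from the error message and prints the presented certificate's validity window:

```go
// certcheck.go: dial the webhook endpoint from the log and print the serving
// certificate's NotBefore/NotAfter, mirroring the x509 expiry check that the
// kubelet's TLS handshake performs.
package main

import (
	"crypto/tls"
	"fmt"
	"log"
	"time"
)

func main() {
	// Address taken from the webhook error above; InsecureSkipVerify is fine
	// here because we only inspect the certificate, we never send data.
	conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{InsecureSkipVerify: true})
	if err != nil {
		log.Fatalf("dial: %v", err)
	}
	defer conn.Close()

	now := time.Now()
	for _, cert := range conn.ConnectionState().PeerCertificates {
		fmt.Printf("subject=%s notBefore=%s notAfter=%s expired=%t\n",
			cert.Subject, cert.NotBefore.Format(time.RFC3339),
			cert.NotAfter.Format(time.RFC3339), now.After(cert.NotAfter))
	}
}
```

Against this node it should report notAfter=2025-08-24T17:21:41Z and expired=true, matching the x509 error in the records above.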
event="NodeHasNoDiskPressure" Dec 10 12:57:04 crc kubenswrapper[4921]: I1210 12:57:04.551512 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:04 crc kubenswrapper[4921]: I1210 12:57:04.551530 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:04 crc kubenswrapper[4921]: I1210 12:57:04.551545 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:04Z","lastTransitionTime":"2025-12-10T12:57:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:04 crc kubenswrapper[4921]: E1210 12:57:04.565884 4921 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:04Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:04Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"aa6d129a-c0be-471d-913f-2184d68fb040\\\",\\\"systemUUID\\\":\\\"539c9d38-f260-4af7-b6c3-f4170bf93c3e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:04Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:04 crc kubenswrapper[4921]: I1210 12:57:04.569719 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:04 crc kubenswrapper[4921]: I1210 12:57:04.569788 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
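Interleaved with the webhook failures, the NodeNotReady / "Node became not ready" records keep citing a second, independent problem: the container runtime network is not ready because no CNI configuration file exists under /etc/kubernetes/cni/net.d/ (on an OpenShift/CRC node that file is normally written by the network operator's OVN-Kubernetes pods once they come up). A rough Go sketch of that probe, simplified relative to libcni's real loading rules and assuming only the directory path quoted in the log:

```go
// cnicheck.go: approximate the "no CNI configuration file" probe by listing
// /etc/kubernetes/cni/net.d/ for .conf/.conflist/.json entries. The real
// kubelet delegates to libcni, which also validates the file contents.
package main

import (
	"fmt"
	"log"
	"os"
	"path/filepath"
)

func main() {
	const confDir = "/etc/kubernetes/cni/net.d" // path from the log message
	entries, err := os.ReadDir(confDir)
	if err != nil {
		log.Fatalf("read %s: %v", confDir, err)
	}
	found := 0
	for _, e := range entries {
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json":
			fmt.Println("CNI config:", filepath.Join(confDir, e.Name()))
			found++
		}
	}
	if found == 0 {
		// This is the state the kubelet keeps reporting above.
		fmt.Println("no CNI configuration file found; node stays NotReady")
	}
}
```

Once a config file appears there, the kubelet should flip NetworkReady to true on a subsequent sync and stop emitting these NodeNotReady records.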
event="NodeHasNoDiskPressure" Dec 10 12:57:04 crc kubenswrapper[4921]: I1210 12:57:04.569800 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:04 crc kubenswrapper[4921]: I1210 12:57:04.569823 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:04 crc kubenswrapper[4921]: I1210 12:57:04.569835 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:04Z","lastTransitionTime":"2025-12-10T12:57:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:04 crc kubenswrapper[4921]: E1210 12:57:04.585239 4921 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:04Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:04Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"aa6d129a-c0be-471d-913f-2184d68fb040\\\",\\\"systemUUID\\\":\\\"539c9d38-f260-4af7-b6c3-f4170bf93c3e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:04Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:04 crc kubenswrapper[4921]: E1210 12:57:04.585503 4921 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 10 12:57:04 crc kubenswrapper[4921]: I1210 12:57:04.587632 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
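The 12:57:04.585503 record shows the other half of the retry mechanism: node-status updates are attempted a fixed number of times per sync loop, and only then does the kubelet surface "update node status exceeds retry count" and wait for the next status-update tick. In the upstream kubelet this bound is the nodeStatusUpdateRetry constant in kubelet_node_status.go (5 at the time of writing; treat the exact value for this build as an assumption). In outline:

```go
// retry.go: outline of the bounded retry behind "update node status exceeds
// retry count". Assumption: the constant mirrors upstream kubelet's
// nodeStatusUpdateRetry; the real logic lives in kubelet_node_status.go.
package main

import (
	"errors"
	"fmt"
)

const nodeStatusUpdateRetry = 5 // assumed to match the upstream constant

func updateNodeStatus(try func() error) error {
	for i := 0; i < nodeStatusUpdateRetry; i++ {
		if err := try(); err == nil {
			return nil
		}
		// each failure produces one "Error updating node status, will retry" record
	}
	return fmt.Errorf("update node status exceeds retry count")
}

func main() {
	webhookDown := errors.New("x509: certificate has expired or is not yet valid")
	// With the webhook certificate expired, every attempt fails, so the loop
	// ends in exactly the error logged at 12:57:04.585503.
	fmt.Println(updateNodeStatus(func() error { return webhookDown }))
}
```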
event="NodeHasSufficientMemory" Dec 10 12:57:04 crc kubenswrapper[4921]: I1210 12:57:04.587698 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:04 crc kubenswrapper[4921]: I1210 12:57:04.587711 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:04 crc kubenswrapper[4921]: I1210 12:57:04.587766 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:04 crc kubenswrapper[4921]: I1210 12:57:04.587784 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:04Z","lastTransitionTime":"2025-12-10T12:57:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:04 crc kubenswrapper[4921]: I1210 12:57:04.690942 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:04 crc kubenswrapper[4921]: I1210 12:57:04.690985 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:04 crc kubenswrapper[4921]: I1210 12:57:04.690997 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:04 crc kubenswrapper[4921]: I1210 12:57:04.691021 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:04 crc kubenswrapper[4921]: I1210 12:57:04.691033 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:04Z","lastTransitionTime":"2025-12-10T12:57:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:04 crc kubenswrapper[4921]: I1210 12:57:04.793827 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:04 crc kubenswrapper[4921]: I1210 12:57:04.793895 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:04 crc kubenswrapper[4921]: I1210 12:57:04.793908 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:04 crc kubenswrapper[4921]: I1210 12:57:04.793931 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:04 crc kubenswrapper[4921]: I1210 12:57:04.793950 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:04Z","lastTransitionTime":"2025-12-10T12:57:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:04 crc kubenswrapper[4921]: I1210 12:57:04.896341 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:04 crc kubenswrapper[4921]: I1210 12:57:04.896407 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:04 crc kubenswrapper[4921]: I1210 12:57:04.896421 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:04 crc kubenswrapper[4921]: I1210 12:57:04.896441 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:04 crc kubenswrapper[4921]: I1210 12:57:04.896453 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:04Z","lastTransitionTime":"2025-12-10T12:57:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:04 crc kubenswrapper[4921]: I1210 12:57:04.999049 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:04 crc kubenswrapper[4921]: I1210 12:57:04.999112 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:04 crc kubenswrapper[4921]: I1210 12:57:04.999197 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:04 crc kubenswrapper[4921]: I1210 12:57:04.999218 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:04 crc kubenswrapper[4921]: I1210 12:57:04.999235 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:04Z","lastTransitionTime":"2025-12-10T12:57:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:05 crc kubenswrapper[4921]: I1210 12:57:05.102943 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:05 crc kubenswrapper[4921]: I1210 12:57:05.102997 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:05 crc kubenswrapper[4921]: I1210 12:57:05.103020 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:05 crc kubenswrapper[4921]: I1210 12:57:05.103052 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:05 crc kubenswrapper[4921]: I1210 12:57:05.103073 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:05Z","lastTransitionTime":"2025-12-10T12:57:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:05 crc kubenswrapper[4921]: I1210 12:57:05.205501 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:05 crc kubenswrapper[4921]: I1210 12:57:05.205550 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:05 crc kubenswrapper[4921]: I1210 12:57:05.205560 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:05 crc kubenswrapper[4921]: I1210 12:57:05.205578 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:05 crc kubenswrapper[4921]: I1210 12:57:05.205590 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:05Z","lastTransitionTime":"2025-12-10T12:57:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:05 crc kubenswrapper[4921]: I1210 12:57:05.309251 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:05 crc kubenswrapper[4921]: I1210 12:57:05.309321 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:05 crc kubenswrapper[4921]: I1210 12:57:05.309341 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:05 crc kubenswrapper[4921]: I1210 12:57:05.309385 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:05 crc kubenswrapper[4921]: I1210 12:57:05.309449 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:05Z","lastTransitionTime":"2025-12-10T12:57:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
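From here the log shifts from node status to pod status: as the records below show, the status_manager's pod patches are routed through the parallel pod.network-node-identity.openshift.io webhook on the same 127.0.0.1:9743 endpoint, so they fail with the identical expired-certificate error. When triaging a dump like this one it helps to tally failures per webhook name; a small Go filter (illustrative, not part of the log) that reads journal output on stdin, e.g. journalctl -u kubelet | go run webhookcount.go:

```go
// webhookcount.go: tally "failed calling webhook" failures per webhook name
// in a kubelet journal dump.
package main

import (
	"bufio"
	"fmt"
	"os"
	"regexp"
)

func main() {
	// Matches both plain and backslash-escaped quotes around the webhook name,
	// e.g. failed calling webhook \"node.network-node-identity.openshift.io\".
	pat := regexp.MustCompile(`failed calling webhook \\?"([^"\\]+)\\?"`)
	counts := map[string]int{}

	sc := bufio.NewScanner(os.Stdin)
	// Status-patch records run to megabytes per line; raise the token limit
	// above bufio.Scanner's 64 KiB default.
	sc.Buffer(make([]byte, 0, 1024*1024), 64*1024*1024)
	for sc.Scan() {
		for _, m := range pat.FindAllStringSubmatch(sc.Text(), -1) {
			counts[m[1]]++
		}
	}
	if err := sc.Err(); err != nil {
		fmt.Fprintln(os.Stderr, "scan:", err)
		os.Exit(1)
	}
	for name, n := range counts {
		fmt.Printf("%6d  %s\n", n, name)
	}
}
```

Run over this window it should show both the node.* and pod.* network-node-identity webhooks failing, confirming that a single expired serving certificate is blocking every status write from this kubelet.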
Has your network provider started?"} Dec 10 12:57:05 crc kubenswrapper[4921]: I1210 12:57:05.374738 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"bd245e67c99943297f64701eba8772143dc206caf67849eaf2f9a8e82dab0d26"} Dec 10 12:57:05 crc kubenswrapper[4921]: I1210 12:57:05.397649 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"371fafdc-aa16-4608-aaa2-e419c4ddbc18\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b9a190a657ca03f3fb08626b7af512164ff131b1783b903a02005a111a7036c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57991b0cb6fd4b37082ff5d4eecc6227d77f241e9a983cd3e0eb9db5b485865f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c24d974446ee70bf587bf3969542cda98f062a9cc78b6af73005d9b8d0a6ee02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-
v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5a3f231014293fc0412e577cf9840f62f8db869ea4f0f8bef1bfc5112b38cf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://17a6158acd097054719316d2ad29dc036546d3951bb1e8dd010618f9155270a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://534968b5f5d9e7b3063c91a3e0b68ba04d83e2cb65ab688b23d284adc6852155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://534968b5f5d9e7b3063c91a3e0b68ba04d83e2cb65ab688b23d284adc6852155\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0acb3ca5fa3945c89412f466b00193354c94ce56dbba608c104d3baf555a2c3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b900
92272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0acb3ca5fa3945c89412f466b00193354c94ce56dbba608c104d3baf555a2c3a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b7d1b714acf0f278cc0310204225d417266a241f1ea827dc625f7b89a7d0ebac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b7d1b714acf0f278cc0310204225d417266a241f1ea827dc625f7b89a7d0ebac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:05Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:05 crc kubenswrapper[4921]: I1210 12:57:05.413212 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:05 crc kubenswrapper[4921]: I1210 12:57:05.413286 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:05 crc kubenswrapper[4921]: I1210 12:57:05.413310 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:05 crc kubenswrapper[4921]: I1210 12:57:05.413343 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:05 crc kubenswrapper[4921]: I1210 12:57:05.413450 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:05Z","lastTransitionTime":"2025-12-10T12:57:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:05 crc kubenswrapper[4921]: I1210 12:57:05.424512 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02726135-3050-46a1-a3ab-b2ce46cdb75d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12292f0529bcf32fb33e5accfbd0dfd7d53e377a9ee2046d4ca6efc78fe1c31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a4716beddbcd24e8418830aa5494cffffc21272e45e30bd15cfe58bfc07c543\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f66fe2144cde40619405c04d7d83cbcc2e78503401df428502abad1682d4cb7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4244835c7f038a7c1bf4820de49854350a23fac13c5a252a1553f6508594f10e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:05Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:05 crc kubenswrapper[4921]: I1210 12:57:05.445350 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:05Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:05 crc kubenswrapper[4921]: I1210 12:57:05.472333 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://307b845aae3352df08e2f9fd394f4110a37b2a21650593ebb584c5bf37d01397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3be8a498516e12174c8b5612669fd69deef610c01ed9884a5228cd436bbae3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:05Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:05 crc kubenswrapper[4921]: I1210 12:57:05.488335 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bd245e67c99943297f64701eba8772143dc206caf67849eaf2f9a8e82dab0d26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:05Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:05 crc kubenswrapper[4921]: I1210 12:57:05.504565 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:05Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:05 crc kubenswrapper[4921]: I1210 12:57:05.516516 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:05 crc kubenswrapper[4921]: I1210 12:57:05.516555 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:05 crc kubenswrapper[4921]: I1210 12:57:05.516564 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:05 crc kubenswrapper[4921]: I1210 12:57:05.516585 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:05 crc kubenswrapper[4921]: I1210 12:57:05.516595 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:05Z","lastTransitionTime":"2025-12-10T12:57:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:05 crc kubenswrapper[4921]: I1210 12:57:05.520922 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2789e9cd1bca4abecf0939aad4a5f63bdc250a525ad3664bc2440e8b0b7a834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:05Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:05 crc kubenswrapper[4921]: I1210 12:57:05.539699 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:05Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:05 crc kubenswrapper[4921]: I1210 12:57:05.559858 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f57208b0-80bc-4c1b-bbab-9d2f858972f6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0534394a39803e8a7555e29d0770b5ac7f9197a5f0e03bec4c5460d77fffdd14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6eaca0cb438e61f0856ed7dc64256ccd02aee8dac014d1f5e9cd8aa180c736fb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://692a4c4828dc74b1bfb948f58fab96ee6674030cb9009c72f30f9eae482eb682\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f534d6390920d177e185001b28f7ece42d82a0da922b4aaf174c271dbe975c50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b39874b20cdccc7903753342421a1f7e13b7e99a2cb699a7c0e44226aebd4f4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"message\\\":\\\"et denominator for mutating requests\\\\\\\" limit=200\\\\nI1210 12:57:01.294872 1 
secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1210 12:57:01.294893 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1210 12:57:01.294918 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 12:57:01.294926 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 12:57:01.294932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1210 12:57:01.294934 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1210 12:57:01.294938 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1210 12:57:01.294941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1210 12:57:01.301734 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2446357718/tls.crt::/tmp/serving-cert-2446357718/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1765371405\\\\\\\\\\\\\\\" (2025-12-10 12:56:44 +0000 UTC to 2026-01-09 12:56:45 +0000 UTC (now=2025-12-10 12:57:01.30169166 +0000 UTC))\\\\\\\"\\\\nI1210 12:57:01.301889 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1765371416\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1765371416\\\\\\\\\\\\\\\" (2025-12-10 11:56:55 +0000 UTC to 2026-12-10 11:56:55 +0000 UTC (now=2025-12-10 12:57:01.301865574 +0000 UTC))\\\\\\\"\\\\nI1210 12:57:01.301907 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1210 12:57:01.301934 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nF1210 12:57:01.302850 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e5afbcb1ea81c3f9ec4152ef614a3f07ba1ded75c774c467e968f9c3ee72e33\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bee74fc4c681cc10c5a460c807659272e393e19173109e82ef65371c5b363ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bee74fc4c681cc10c5a460c807659272e393e19173109e82ef65371c5b363ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:05Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:05 crc kubenswrapper[4921]: I1210 12:57:05.619459 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:05 crc kubenswrapper[4921]: I1210 12:57:05.619505 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:05 crc kubenswrapper[4921]: I1210 12:57:05.619515 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:05 crc kubenswrapper[4921]: I1210 12:57:05.619533 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:05 crc kubenswrapper[4921]: I1210 12:57:05.619543 4921 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:05Z","lastTransitionTime":"2025-12-10T12:57:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:05 crc kubenswrapper[4921]: I1210 12:57:05.722960 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:05 crc kubenswrapper[4921]: I1210 12:57:05.723005 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:05 crc kubenswrapper[4921]: I1210 12:57:05.723017 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:05 crc kubenswrapper[4921]: I1210 12:57:05.723036 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:05 crc kubenswrapper[4921]: I1210 12:57:05.723050 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:05Z","lastTransitionTime":"2025-12-10T12:57:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:05 crc kubenswrapper[4921]: I1210 12:57:05.826908 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:05 crc kubenswrapper[4921]: I1210 12:57:05.826954 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:05 crc kubenswrapper[4921]: I1210 12:57:05.826967 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:05 crc kubenswrapper[4921]: I1210 12:57:05.826988 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:05 crc kubenswrapper[4921]: I1210 12:57:05.827002 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:05Z","lastTransitionTime":"2025-12-10T12:57:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:05 crc kubenswrapper[4921]: I1210 12:57:05.866155 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 12:57:05 crc kubenswrapper[4921]: I1210 12:57:05.866275 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 12:57:05 crc kubenswrapper[4921]: I1210 12:57:05.866365 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 12:57:05 crc kubenswrapper[4921]: E1210 12:57:05.866505 4921 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 10 12:57:05 crc kubenswrapper[4921]: E1210 12:57:05.866651 4921 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 10 12:57:05 crc kubenswrapper[4921]: E1210 12:57:05.866657 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-10 12:57:09.866611622 +0000 UTC m=+27.082833696 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 10 12:57:05 crc kubenswrapper[4921]: E1210 12:57:05.866770 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-10 12:57:09.866746205 +0000 UTC m=+27.082968169 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 10 12:57:05 crc kubenswrapper[4921]: E1210 12:57:05.866941 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-12-10 12:57:09.8669226 +0000 UTC m=+27.083144654 (durationBeforeRetry 4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:57:05 crc kubenswrapper[4921]: I1210 12:57:05.930632 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:05 crc kubenswrapper[4921]: I1210 12:57:05.930689 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:05 crc kubenswrapper[4921]: I1210 12:57:05.930703 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:05 crc kubenswrapper[4921]: I1210 12:57:05.930724 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:05 crc kubenswrapper[4921]: I1210 12:57:05.930740 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:05Z","lastTransitionTime":"2025-12-10T12:57:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:05 crc kubenswrapper[4921]: I1210 12:57:05.967346 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 12:57:05 crc kubenswrapper[4921]: I1210 12:57:05.967469 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 12:57:05 crc kubenswrapper[4921]: E1210 12:57:05.967634 4921 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 10 12:57:05 crc kubenswrapper[4921]: E1210 12:57:05.967656 4921 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 10 12:57:05 crc kubenswrapper[4921]: E1210 12:57:05.967657 4921 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 10 12:57:05 crc kubenswrapper[4921]: E1210 12:57:05.967737 4921 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered 
Dec 10 12:57:05 crc kubenswrapper[4921]: E1210 12:57:05.967759 4921 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 10 12:57:05 crc kubenswrapper[4921]: E1210 12:57:05.967671 4921 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 10 12:57:05 crc kubenswrapper[4921]: E1210 12:57:05.967843 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-10 12:57:09.967817598 +0000 UTC m=+27.184039562 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 10 12:57:05 crc kubenswrapper[4921]: E1210 12:57:05.967897 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-10 12:57:09.967858409 +0000 UTC m=+27.184080503 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 10 12:57:06 crc kubenswrapper[4921]: I1210 12:57:06.034300 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:06 crc kubenswrapper[4921]: I1210 12:57:06.034352 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:06 crc kubenswrapper[4921]: I1210 12:57:06.034367 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:06 crc kubenswrapper[4921]: I1210 12:57:06.034412 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:06 crc kubenswrapper[4921]: I1210 12:57:06.034426 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:06Z","lastTransitionTime":"2025-12-10T12:57:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:06 crc kubenswrapper[4921]: I1210 12:57:06.138245 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:06 crc kubenswrapper[4921]: I1210 12:57:06.138304 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:06 crc kubenswrapper[4921]: I1210 12:57:06.138317 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:06 crc kubenswrapper[4921]: I1210 12:57:06.138340 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:06 crc kubenswrapper[4921]: I1210 12:57:06.138352 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:06Z","lastTransitionTime":"2025-12-10T12:57:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:06 crc kubenswrapper[4921]: I1210 12:57:06.192111 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 12:57:06 crc kubenswrapper[4921]: I1210 12:57:06.192177 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 12:57:06 crc kubenswrapper[4921]: I1210 12:57:06.192204 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 12:57:06 crc kubenswrapper[4921]: E1210 12:57:06.192297 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 12:57:06 crc kubenswrapper[4921]: E1210 12:57:06.192421 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 12:57:06 crc kubenswrapper[4921]: E1210 12:57:06.192570 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 12:57:06 crc kubenswrapper[4921]: I1210 12:57:06.241105 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:06 crc kubenswrapper[4921]: I1210 12:57:06.241162 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:06 crc kubenswrapper[4921]: I1210 12:57:06.241173 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:06 crc kubenswrapper[4921]: I1210 12:57:06.241190 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:06 crc kubenswrapper[4921]: I1210 12:57:06.241203 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:06Z","lastTransitionTime":"2025-12-10T12:57:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:06 crc kubenswrapper[4921]: I1210 12:57:06.343725 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:06 crc kubenswrapper[4921]: I1210 12:57:06.343788 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:06 crc kubenswrapper[4921]: I1210 12:57:06.343803 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:06 crc kubenswrapper[4921]: I1210 12:57:06.343825 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:06 crc kubenswrapper[4921]: I1210 12:57:06.343841 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:06Z","lastTransitionTime":"2025-12-10T12:57:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:06 crc kubenswrapper[4921]: I1210 12:57:06.447159 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:06 crc kubenswrapper[4921]: I1210 12:57:06.447226 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:06 crc kubenswrapper[4921]: I1210 12:57:06.447243 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:06 crc kubenswrapper[4921]: I1210 12:57:06.447270 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:06 crc kubenswrapper[4921]: I1210 12:57:06.447287 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:06Z","lastTransitionTime":"2025-12-10T12:57:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:06 crc kubenswrapper[4921]: I1210 12:57:06.550439 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:06 crc kubenswrapper[4921]: I1210 12:57:06.550519 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:06 crc kubenswrapper[4921]: I1210 12:57:06.550538 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:06 crc kubenswrapper[4921]: I1210 12:57:06.550563 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:06 crc kubenswrapper[4921]: I1210 12:57:06.550581 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:06Z","lastTransitionTime":"2025-12-10T12:57:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:06 crc kubenswrapper[4921]: I1210 12:57:06.654094 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:06 crc kubenswrapper[4921]: I1210 12:57:06.654154 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:06 crc kubenswrapper[4921]: I1210 12:57:06.654170 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:06 crc kubenswrapper[4921]: I1210 12:57:06.654193 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:06 crc kubenswrapper[4921]: I1210 12:57:06.654239 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:06Z","lastTransitionTime":"2025-12-10T12:57:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:06 crc kubenswrapper[4921]: I1210 12:57:06.757199 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:06 crc kubenswrapper[4921]: I1210 12:57:06.757245 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:06 crc kubenswrapper[4921]: I1210 12:57:06.757260 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:06 crc kubenswrapper[4921]: I1210 12:57:06.757282 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:06 crc kubenswrapper[4921]: I1210 12:57:06.757301 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:06Z","lastTransitionTime":"2025-12-10T12:57:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:06 crc kubenswrapper[4921]: I1210 12:57:06.860632 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:06 crc kubenswrapper[4921]: I1210 12:57:06.860746 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:06 crc kubenswrapper[4921]: I1210 12:57:06.860774 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:06 crc kubenswrapper[4921]: I1210 12:57:06.860810 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:06 crc kubenswrapper[4921]: I1210 12:57:06.860835 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:06Z","lastTransitionTime":"2025-12-10T12:57:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:06 crc kubenswrapper[4921]: I1210 12:57:06.963882 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:06 crc kubenswrapper[4921]: I1210 12:57:06.963930 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:06 crc kubenswrapper[4921]: I1210 12:57:06.963947 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:06 crc kubenswrapper[4921]: I1210 12:57:06.963966 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:06 crc kubenswrapper[4921]: I1210 12:57:06.963979 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:06Z","lastTransitionTime":"2025-12-10T12:57:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:07 crc kubenswrapper[4921]: I1210 12:57:07.066194 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:07 crc kubenswrapper[4921]: I1210 12:57:07.066229 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:07 crc kubenswrapper[4921]: I1210 12:57:07.066238 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:07 crc kubenswrapper[4921]: I1210 12:57:07.066253 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:07 crc kubenswrapper[4921]: I1210 12:57:07.066263 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:07Z","lastTransitionTime":"2025-12-10T12:57:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:07 crc kubenswrapper[4921]: I1210 12:57:07.168408 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:07 crc kubenswrapper[4921]: I1210 12:57:07.168448 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:07 crc kubenswrapper[4921]: I1210 12:57:07.168461 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:07 crc kubenswrapper[4921]: I1210 12:57:07.168477 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:07 crc kubenswrapper[4921]: I1210 12:57:07.168487 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:07Z","lastTransitionTime":"2025-12-10T12:57:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:07 crc kubenswrapper[4921]: I1210 12:57:07.271333 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:07 crc kubenswrapper[4921]: I1210 12:57:07.271370 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:07 crc kubenswrapper[4921]: I1210 12:57:07.271379 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:07 crc kubenswrapper[4921]: I1210 12:57:07.271406 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:07 crc kubenswrapper[4921]: I1210 12:57:07.271415 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:07Z","lastTransitionTime":"2025-12-10T12:57:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:07 crc kubenswrapper[4921]: I1210 12:57:07.373677 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:07 crc kubenswrapper[4921]: I1210 12:57:07.373952 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:07 crc kubenswrapper[4921]: I1210 12:57:07.374039 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:07 crc kubenswrapper[4921]: I1210 12:57:07.374102 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:07 crc kubenswrapper[4921]: I1210 12:57:07.374171 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:07Z","lastTransitionTime":"2025-12-10T12:57:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:07 crc kubenswrapper[4921]: I1210 12:57:07.476753 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:07 crc kubenswrapper[4921]: I1210 12:57:07.476783 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:07 crc kubenswrapper[4921]: I1210 12:57:07.476791 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:07 crc kubenswrapper[4921]: I1210 12:57:07.476805 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:07 crc kubenswrapper[4921]: I1210 12:57:07.476814 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:07Z","lastTransitionTime":"2025-12-10T12:57:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:07 crc kubenswrapper[4921]: I1210 12:57:07.580587 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:07 crc kubenswrapper[4921]: I1210 12:57:07.581400 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:07 crc kubenswrapper[4921]: I1210 12:57:07.581499 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:07 crc kubenswrapper[4921]: I1210 12:57:07.581570 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:07 crc kubenswrapper[4921]: I1210 12:57:07.581637 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:07Z","lastTransitionTime":"2025-12-10T12:57:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:07 crc kubenswrapper[4921]: I1210 12:57:07.684643 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:07 crc kubenswrapper[4921]: I1210 12:57:07.684682 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:07 crc kubenswrapper[4921]: I1210 12:57:07.684694 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:07 crc kubenswrapper[4921]: I1210 12:57:07.684710 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:07 crc kubenswrapper[4921]: I1210 12:57:07.684720 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:07Z","lastTransitionTime":"2025-12-10T12:57:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:07 crc kubenswrapper[4921]: I1210 12:57:07.787438 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:07 crc kubenswrapper[4921]: I1210 12:57:07.787487 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:07 crc kubenswrapper[4921]: I1210 12:57:07.787497 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:07 crc kubenswrapper[4921]: I1210 12:57:07.787514 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:07 crc kubenswrapper[4921]: I1210 12:57:07.787523 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:07Z","lastTransitionTime":"2025-12-10T12:57:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:07 crc kubenswrapper[4921]: I1210 12:57:07.895973 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:07 crc kubenswrapper[4921]: I1210 12:57:07.896043 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:07 crc kubenswrapper[4921]: I1210 12:57:07.896058 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:07 crc kubenswrapper[4921]: I1210 12:57:07.896081 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:07 crc kubenswrapper[4921]: I1210 12:57:07.896094 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:07Z","lastTransitionTime":"2025-12-10T12:57:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:07 crc kubenswrapper[4921]: I1210 12:57:07.998968 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:07 crc kubenswrapper[4921]: I1210 12:57:07.999017 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:07 crc kubenswrapper[4921]: I1210 12:57:07.999030 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:07 crc kubenswrapper[4921]: I1210 12:57:07.999049 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:07 crc kubenswrapper[4921]: I1210 12:57:07.999060 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:07Z","lastTransitionTime":"2025-12-10T12:57:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.102149 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.102212 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.102224 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.102244 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.102256 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:08Z","lastTransitionTime":"2025-12-10T12:57:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.191991 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.192046 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.192067 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 12:57:08 crc kubenswrapper[4921]: E1210 12:57:08.192169 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 12:57:08 crc kubenswrapper[4921]: E1210 12:57:08.192505 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 12:57:08 crc kubenswrapper[4921]: E1210 12:57:08.192599 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.204887 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.204932 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.204943 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.204963 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.204982 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:08Z","lastTransitionTime":"2025-12-10T12:57:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.297457 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-zmks6"] Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.297850 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/node-resolver-zmks6" Dec 10 12:57:08 crc kubenswrapper[4921]: W1210 12:57:08.302169 4921 reflector.go:561] object-"openshift-dns"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-dns": no relationship found between node 'crc' and this object Dec 10 12:57:08 crc kubenswrapper[4921]: E1210 12:57:08.302238 4921 reflector.go:158] "Unhandled Error" err="object-\"openshift-dns\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-dns\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 10 12:57:08 crc kubenswrapper[4921]: W1210 12:57:08.302341 4921 reflector.go:561] object-"openshift-dns"/"openshift-service-ca.crt": failed to list *v1.ConfigMap: configmaps "openshift-service-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-dns": no relationship found between node 'crc' and this object Dec 10 12:57:08 crc kubenswrapper[4921]: E1210 12:57:08.302408 4921 reflector.go:158] "Unhandled Error" err="object-\"openshift-dns\"/\"openshift-service-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"openshift-service-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-dns\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.309676 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.309717 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.309730 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.309749 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.309761 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:08Z","lastTransitionTime":"2025-12-10T12:57:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.311829 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.329006 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f57208b0-80bc-4c1b-bbab-9d2f858972f6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0534394a39803e8a7555e29d0770b5ac7f9197a5f0e03bec4c5460d77fffdd14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6eaca0cb438e61f0856ed7dc64256ccd02aee8dac014d1f5e9cd8aa180c736fb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://692a4c4828dc74b1bfb948f58fab96ee6674030cb9009c72f30f9eae482eb682\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operato
r@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f534d6390920d177e185001b28f7ece42d82a0da922b4aaf174c271dbe975c50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b39874b20cdccc7903753342421a1f7e13b7e99a2cb699a7c0e44226aebd4f4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"message\\\":\\\"et denominator for mutating requests\\\\\\\" limit=200\\\\nI1210 12:57:01.294872 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1210 12:57:01.294893 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1210 12:57:01.294918 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 12:57:01.294926 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 12:57:01.294932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1210 12:57:01.294934 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1210 12:57:01.294938 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1210 12:57:01.294941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1210 12:57:01.301734 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2446357718/tls.crt::/tmp/serving-cert-2446357718/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1765371405\\\\\\\\\\\\\\\" (2025-12-10 12:56:44 +0000 UTC to 2026-01-09 12:56:45 +0000 UTC (now=2025-12-10 12:57:01.30169166 +0000 UTC))\\\\\\\"\\\\nI1210 12:57:01.301889 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1765371416\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1765371416\\\\\\\\\\\\\\\" (2025-12-10 11:56:55 +0000 UTC to 2026-12-10 11:56:55 +0000 UTC (now=2025-12-10 12:57:01.301865574 +0000 UTC))\\\\\\\"\\\\nI1210 12:57:01.301907 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1210 12:57:01.301934 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nF1210 12:57:01.302850 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e5afbcb1ea81c3f9ec4152ef614a3f07ba1ded75c774c467e968f9c3ee72e33\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bee74fc4c681cc10c5a460c807659272e393e19173109e82ef65371c5b363ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bee74fc4c681cc10c5a460c807659272e393e19173109e82ef65371c5b363ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:08Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.342898 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:08Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.355766 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2789e9cd1bca4abecf0939aad4a5f63bdc250a525ad3664bc2440e8b0b7a834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:08Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.368264 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:08Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.380292 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zmks6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f2626c5-78df-45d2-8970-c4f99790a0fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ft9kj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zmks6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:08Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.389910 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ft9kj\" (UniqueName: \"kubernetes.io/projected/1f2626c5-78df-45d2-8970-c4f99790a0fb-kube-api-access-ft9kj\") pod \"node-resolver-zmks6\" (UID: \"1f2626c5-78df-45d2-8970-c4f99790a0fb\") " pod="openshift-dns/node-resolver-zmks6" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.390016 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/1f2626c5-78df-45d2-8970-c4f99790a0fb-hosts-file\") pod \"node-resolver-zmks6\" (UID: \"1f2626c5-78df-45d2-8970-c4f99790a0fb\") " pod="openshift-dns/node-resolver-zmks6" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.399095 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"371fafdc-aa16-4608-aaa2-e419c4ddbc18\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b9a190a657ca03f3fb08626b7af512164ff131b1783b903a02005a111a7036c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57991b0cb6fd4b37082ff5d4eecc6227d77f241e9a983cd3e0eb9db5b485865f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"sta
rtedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c24d974446ee70bf587bf3969542cda98f062a9cc78b6af73005d9b8d0a6ee02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5a3f231014293fc0412e577cf9840f62f8db869ea4f0f8bef1bfc5112b38cf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://17a6158acd097054719316d2ad29dc036546d3951bb1e8dd010618f9155270a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://534968b5f5d9e7b3063c91a3e0b68ba04d83e2cb65ab688b23d284adc6852155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://534968b5f5d9e7b3063c91a3e0b68ba04d83e2cb6
5ab688b23d284adc6852155\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0acb3ca5fa3945c89412f466b00193354c94ce56dbba608c104d3baf555a2c3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0acb3ca5fa3945c89412f466b00193354c94ce56dbba608c104d3baf555a2c3a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b7d1b714acf0f278cc0310204225d417266a241f1ea827dc625f7b89a7d0ebac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b7d1b714acf0f278cc0310204225d417266a241f1ea827dc625f7b89a7d0ebac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:08Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.413410 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.413465 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.413477 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.413500 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.413514 4921 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:08Z","lastTransitionTime":"2025-12-10T12:57:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.414309 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02726135-3050-46a1-a3ab-b2ce46cdb75d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12292f0529bcf32fb33e5accfbd0dfd7d53e377a9ee2046d4ca6efc78fe1c31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a4716beddbcd24e8418830aa5494cffffc21272e45e30bd15cfe58bfc07c543\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f66fe2144cde40619405c04d7d83cbcc2e78503401df428502abad1682d4cb7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastS
tate\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4244835c7f038a7c1bf4820de49854350a23fac13c5a252a1553f6508594f10e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:08Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.426972 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:08Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.446242 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://307b845aae3352df08e2f9fd394f4110a37b2a21650593ebb584c5bf37d01397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3be8a498516e12174c8b5612669fd69deef610c01ed9884a5228cd436bbae3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:08Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.467924 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bd245e67c99943297f64701eba8772143dc206caf67849eaf2f9a8e82dab0d26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:08Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.491545 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ft9kj\" (UniqueName: \"kubernetes.io/projected/1f2626c5-78df-45d2-8970-c4f99790a0fb-kube-api-access-ft9kj\") pod \"node-resolver-zmks6\" (UID: \"1f2626c5-78df-45d2-8970-c4f99790a0fb\") " pod="openshift-dns/node-resolver-zmks6" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.491593 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: 
\"kubernetes.io/host-path/1f2626c5-78df-45d2-8970-c4f99790a0fb-hosts-file\") pod \"node-resolver-zmks6\" (UID: \"1f2626c5-78df-45d2-8970-c4f99790a0fb\") " pod="openshift-dns/node-resolver-zmks6" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.491716 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/1f2626c5-78df-45d2-8970-c4f99790a0fb-hosts-file\") pod \"node-resolver-zmks6\" (UID: \"1f2626c5-78df-45d2-8970-c4f99790a0fb\") " pod="openshift-dns/node-resolver-zmks6" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.515843 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.515902 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.515912 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.515928 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.515940 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:08Z","lastTransitionTime":"2025-12-10T12:57:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.618825 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.618914 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.618938 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.618969 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.618988 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:08Z","lastTransitionTime":"2025-12-10T12:57:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.666497 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-vn2n6"] Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.666925 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.668701 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-86bpd"] Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.669556 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-86bpd" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.669642 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-pqlx4"] Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.670092 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-pqlx4" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.670958 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.671049 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.671149 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.671246 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.671480 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.671886 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-m7n89"] Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.672798 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.681971 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.682067 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.682272 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.682354 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.682505 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.682536 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.682676 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.686095 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.686526 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.686793 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.687012 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.687135 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.687021 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.687366 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.694030 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/709b4982-f2e6-4692-ab1a-c1d5b7d507ad-cni-binary-copy\") pod \"multus-additional-cni-plugins-86bpd\" (UID: \"709b4982-f2e6-4692-ab1a-c1d5b7d507ad\") " pod="openshift-multus/multus-additional-cni-plugins-86bpd" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.694075 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/50684108-04fc-405c-82be-d21d16cd650b-host-slash\") pod \"ovnkube-node-m7n89\" (UID: \"50684108-04fc-405c-82be-d21d16cd650b\") " pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.694097 4921 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e-hostroot\") pod \"multus-pqlx4\" (UID: \"78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e\") " pod="openshift-multus/multus-pqlx4" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.694130 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/709b4982-f2e6-4692-ab1a-c1d5b7d507ad-system-cni-dir\") pod \"multus-additional-cni-plugins-86bpd\" (UID: \"709b4982-f2e6-4692-ab1a-c1d5b7d507ad\") " pod="openshift-multus/multus-additional-cni-plugins-86bpd" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.694148 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z76j8\" (UniqueName: \"kubernetes.io/projected/709b4982-f2e6-4692-ab1a-c1d5b7d507ad-kube-api-access-z76j8\") pod \"multus-additional-cni-plugins-86bpd\" (UID: \"709b4982-f2e6-4692-ab1a-c1d5b7d507ad\") " pod="openshift-multus/multus-additional-cni-plugins-86bpd" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.694177 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/50684108-04fc-405c-82be-d21d16cd650b-host-run-ovn-kubernetes\") pod \"ovnkube-node-m7n89\" (UID: \"50684108-04fc-405c-82be-d21d16cd650b\") " pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.694201 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dbm9s\" (UniqueName: \"kubernetes.io/projected/354355f7-6630-49a8-bdc5-5e875feecb7f-kube-api-access-dbm9s\") pod \"machine-config-daemon-vn2n6\" (UID: \"354355f7-6630-49a8-bdc5-5e875feecb7f\") " pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.694222 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/50684108-04fc-405c-82be-d21d16cd650b-run-openvswitch\") pod \"ovnkube-node-m7n89\" (UID: \"50684108-04fc-405c-82be-d21d16cd650b\") " pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.694242 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/50684108-04fc-405c-82be-d21d16cd650b-host-cni-netd\") pod \"ovnkube-node-m7n89\" (UID: \"50684108-04fc-405c-82be-d21d16cd650b\") " pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.694274 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/709b4982-f2e6-4692-ab1a-c1d5b7d507ad-tuning-conf-dir\") pod \"multus-additional-cni-plugins-86bpd\" (UID: \"709b4982-f2e6-4692-ab1a-c1d5b7d507ad\") " pod="openshift-multus/multus-additional-cni-plugins-86bpd" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.694294 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/50684108-04fc-405c-82be-d21d16cd650b-node-log\") pod \"ovnkube-node-m7n89\" (UID: 
\"50684108-04fc-405c-82be-d21d16cd650b\") " pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.694312 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e-os-release\") pod \"multus-pqlx4\" (UID: \"78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e\") " pod="openshift-multus/multus-pqlx4" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.694406 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/50684108-04fc-405c-82be-d21d16cd650b-var-lib-openvswitch\") pod \"ovnkube-node-m7n89\" (UID: \"50684108-04fc-405c-82be-d21d16cd650b\") " pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.694432 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/50684108-04fc-405c-82be-d21d16cd650b-ovnkube-script-lib\") pod \"ovnkube-node-m7n89\" (UID: \"50684108-04fc-405c-82be-d21d16cd650b\") " pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.694457 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/354355f7-6630-49a8-bdc5-5e875feecb7f-proxy-tls\") pod \"machine-config-daemon-vn2n6\" (UID: \"354355f7-6630-49a8-bdc5-5e875feecb7f\") " pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.694479 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/50684108-04fc-405c-82be-d21d16cd650b-env-overrides\") pod \"ovnkube-node-m7n89\" (UID: \"50684108-04fc-405c-82be-d21d16cd650b\") " pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.694500 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e-system-cni-dir\") pod \"multus-pqlx4\" (UID: \"78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e\") " pod="openshift-multus/multus-pqlx4" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.694522 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e-host-var-lib-cni-multus\") pod \"multus-pqlx4\" (UID: \"78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e\") " pod="openshift-multus/multus-pqlx4" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.694547 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lhs2m\" (UniqueName: \"kubernetes.io/projected/78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e-kube-api-access-lhs2m\") pod \"multus-pqlx4\" (UID: \"78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e\") " pod="openshift-multus/multus-pqlx4" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.694576 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: 
\"kubernetes.io/configmap/709b4982-f2e6-4692-ab1a-c1d5b7d507ad-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-86bpd\" (UID: \"709b4982-f2e6-4692-ab1a-c1d5b7d507ad\") " pod="openshift-multus/multus-additional-cni-plugins-86bpd" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.694601 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/50684108-04fc-405c-82be-d21d16cd650b-run-ovn\") pod \"ovnkube-node-m7n89\" (UID: \"50684108-04fc-405c-82be-d21d16cd650b\") " pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.694622 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e-host-run-netns\") pod \"multus-pqlx4\" (UID: \"78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e\") " pod="openshift-multus/multus-pqlx4" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.694643 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e-multus-conf-dir\") pod \"multus-pqlx4\" (UID: \"78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e\") " pod="openshift-multus/multus-pqlx4" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.694697 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/50684108-04fc-405c-82be-d21d16cd650b-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-m7n89\" (UID: \"50684108-04fc-405c-82be-d21d16cd650b\") " pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.694732 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e-multus-socket-dir-parent\") pod \"multus-pqlx4\" (UID: \"78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e\") " pod="openshift-multus/multus-pqlx4" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.694771 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e-host-run-multus-certs\") pod \"multus-pqlx4\" (UID: \"78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e\") " pod="openshift-multus/multus-pqlx4" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.694795 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/50684108-04fc-405c-82be-d21d16cd650b-log-socket\") pod \"ovnkube-node-m7n89\" (UID: \"50684108-04fc-405c-82be-d21d16cd650b\") " pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.694821 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/354355f7-6630-49a8-bdc5-5e875feecb7f-rootfs\") pod \"machine-config-daemon-vn2n6\" (UID: \"354355f7-6630-49a8-bdc5-5e875feecb7f\") " pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.694898 4921 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/354355f7-6630-49a8-bdc5-5e875feecb7f-mcd-auth-proxy-config\") pod \"machine-config-daemon-vn2n6\" (UID: \"354355f7-6630-49a8-bdc5-5e875feecb7f\") " pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.694944 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e-host-var-lib-cni-bin\") pod \"multus-pqlx4\" (UID: \"78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e\") " pod="openshift-multus/multus-pqlx4" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.694968 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/709b4982-f2e6-4692-ab1a-c1d5b7d507ad-cnibin\") pod \"multus-additional-cni-plugins-86bpd\" (UID: \"709b4982-f2e6-4692-ab1a-c1d5b7d507ad\") " pod="openshift-multus/multus-additional-cni-plugins-86bpd" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.694991 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/50684108-04fc-405c-82be-d21d16cd650b-host-kubelet\") pod \"ovnkube-node-m7n89\" (UID: \"50684108-04fc-405c-82be-d21d16cd650b\") " pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.695011 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/50684108-04fc-405c-82be-d21d16cd650b-ovnkube-config\") pod \"ovnkube-node-m7n89\" (UID: \"50684108-04fc-405c-82be-d21d16cd650b\") " pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.695032 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e-cnibin\") pod \"multus-pqlx4\" (UID: \"78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e\") " pod="openshift-multus/multus-pqlx4" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.695053 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e-etc-kubernetes\") pod \"multus-pqlx4\" (UID: \"78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e\") " pod="openshift-multus/multus-pqlx4" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.695077 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/50684108-04fc-405c-82be-d21d16cd650b-ovn-node-metrics-cert\") pod \"ovnkube-node-m7n89\" (UID: \"50684108-04fc-405c-82be-d21d16cd650b\") " pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.695099 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9ddcs\" (UniqueName: \"kubernetes.io/projected/50684108-04fc-405c-82be-d21d16cd650b-kube-api-access-9ddcs\") pod \"ovnkube-node-m7n89\" (UID: \"50684108-04fc-405c-82be-d21d16cd650b\") " pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" Dec 10 
12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.695124 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/50684108-04fc-405c-82be-d21d16cd650b-systemd-units\") pod \"ovnkube-node-m7n89\" (UID: \"50684108-04fc-405c-82be-d21d16cd650b\") " pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.695149 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/50684108-04fc-405c-82be-d21d16cd650b-host-run-netns\") pod \"ovnkube-node-m7n89\" (UID: \"50684108-04fc-405c-82be-d21d16cd650b\") " pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.695523 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/50684108-04fc-405c-82be-d21d16cd650b-run-systemd\") pod \"ovnkube-node-m7n89\" (UID: \"50684108-04fc-405c-82be-d21d16cd650b\") " pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.695627 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/50684108-04fc-405c-82be-d21d16cd650b-host-cni-bin\") pod \"ovnkube-node-m7n89\" (UID: \"50684108-04fc-405c-82be-d21d16cd650b\") " pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.695652 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e-multus-daemon-config\") pod \"multus-pqlx4\" (UID: \"78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e\") " pod="openshift-multus/multus-pqlx4" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.695670 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e-host-run-k8s-cni-cncf-io\") pod \"multus-pqlx4\" (UID: \"78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e\") " pod="openshift-multus/multus-pqlx4" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.695704 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/50684108-04fc-405c-82be-d21d16cd650b-etc-openvswitch\") pod \"ovnkube-node-m7n89\" (UID: \"50684108-04fc-405c-82be-d21d16cd650b\") " pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.695732 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e-host-var-lib-kubelet\") pod \"multus-pqlx4\" (UID: \"78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e\") " pod="openshift-multus/multus-pqlx4" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.695769 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/709b4982-f2e6-4692-ab1a-c1d5b7d507ad-os-release\") pod \"multus-additional-cni-plugins-86bpd\" (UID: \"709b4982-f2e6-4692-ab1a-c1d5b7d507ad\") " 
pod="openshift-multus/multus-additional-cni-plugins-86bpd" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.695905 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e-multus-cni-dir\") pod \"multus-pqlx4\" (UID: \"78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e\") " pod="openshift-multus/multus-pqlx4" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.695950 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e-cni-binary-copy\") pod \"multus-pqlx4\" (UID: \"78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e\") " pod="openshift-multus/multus-pqlx4" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.699450 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2789e9cd1bca4abecf0939aad4a5f63bdc250a525ad3664bc2440e8b0b7a834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:08Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.711784 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:08Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.721177 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.721220 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.721232 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.721249 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.721262 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:08Z","lastTransitionTime":"2025-12-10T12:57:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.722035 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zmks6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f2626c5-78df-45d2-8970-c4f99790a0fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ft9kj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zmks6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:08Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.736945 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"354355f7-6630-49a8-bdc5-5e875feecb7f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dbm9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dbm9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-vn2n6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:08Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.751977 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f57208b0-80bc-4c1b-bbab-9d2f858972f6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0534394a39803e8a7555e29d0770b5ac7f9197a5f0e03bec4c5460d77fffdd14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6eaca0cb438e61f0856ed7dc64256ccd02aee8dac014d1f5e9cd8aa180c736fb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://692a4c4828dc74b1bfb948f58fab96ee6674030cb9009c72f30f9eae482eb682\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f534d6390920d177e185001b28f7ece42d82a0da922b4aaf174c271dbe975c50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b39874b20cdccc7903753342421a1f7e13b7e99a2cb699a7c0e44226aebd4f4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"message\\\":\\\"et denominator for mutating requests\\\\\\\" limit=200\\\\nI1210 12:57:01.294872 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1210 12:57:01.294893 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1210 12:57:01.294918 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 12:57:01.294926 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 12:57:01.294932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1210 12:57:01.294934 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1210 12:57:01.294938 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1210 12:57:01.294941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1210 12:57:01.301734 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2446357718/tls.crt::/tmp/serving-cert-2446357718/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1765371405\\\\\\\\\\\\\\\" (2025-12-10 12:56:44 +0000 UTC to 2026-01-09 12:56:45 +0000 UTC (now=2025-12-10 12:57:01.30169166 +0000 UTC))\\\\\\\"\\\\nI1210 12:57:01.301889 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1765371416\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1765371416\\\\\\\\\\\\\\\" (2025-12-10 11:56:55 +0000 UTC to 2026-12-10 11:56:55 +0000 UTC (now=2025-12-10 12:57:01.301865574 +0000 UTC))\\\\\\\"\\\\nI1210 12:57:01.301907 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1210 12:57:01.301934 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nF1210 12:57:01.302850 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e5afbcb1ea81c3f9ec4152ef614a3f07ba1ded75c774c467e968f9c3ee72e33\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bee74fc4c681cc10c5a460c807659272e393e19173109e82ef65371c5b363ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bee74fc4c681cc10c5a460c807659272e393e19173109e82ef65371c5b363ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:08Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.764861 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:08Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.778991 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:08Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.793037 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://307b845aae3352df08e2f9fd394f4110a37b2a21650593ebb584c5bf37d01397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3be8a498516e12174c8b5612669fd69deef610c01ed9884a5228cd436bbae3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:08Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.796826 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/50684108-04fc-405c-82be-d21d16cd650b-env-overrides\") pod \"ovnkube-node-m7n89\" (UID: \"50684108-04fc-405c-82be-d21d16cd650b\") " pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.796858 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e-system-cni-dir\") pod \"multus-pqlx4\" (UID: \"78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e\") " pod="openshift-multus/multus-pqlx4" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.796876 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e-host-var-lib-cni-multus\") pod \"multus-pqlx4\" (UID: \"78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e\") " pod="openshift-multus/multus-pqlx4" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.796906 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/709b4982-f2e6-4692-ab1a-c1d5b7d507ad-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-86bpd\" (UID: \"709b4982-f2e6-4692-ab1a-c1d5b7d507ad\") " pod="openshift-multus/multus-additional-cni-plugins-86bpd" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.796931 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/50684108-04fc-405c-82be-d21d16cd650b-run-ovn\") pod \"ovnkube-node-m7n89\" (UID: \"50684108-04fc-405c-82be-d21d16cd650b\") " pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.796948 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e-host-run-netns\") pod \"multus-pqlx4\" (UID: \"78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e\") " pod="openshift-multus/multus-pqlx4" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.796983 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e-multus-conf-dir\") pod \"multus-pqlx4\" (UID: \"78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e\") " pod="openshift-multus/multus-pqlx4" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.797002 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lhs2m\" (UniqueName: 
\"kubernetes.io/projected/78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e-kube-api-access-lhs2m\") pod \"multus-pqlx4\" (UID: \"78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e\") " pod="openshift-multus/multus-pqlx4" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.797023 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/50684108-04fc-405c-82be-d21d16cd650b-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-m7n89\" (UID: \"50684108-04fc-405c-82be-d21d16cd650b\") " pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.797039 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e-multus-socket-dir-parent\") pod \"multus-pqlx4\" (UID: \"78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e\") " pod="openshift-multus/multus-pqlx4" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.797057 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/50684108-04fc-405c-82be-d21d16cd650b-log-socket\") pod \"ovnkube-node-m7n89\" (UID: \"50684108-04fc-405c-82be-d21d16cd650b\") " pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.797077 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/354355f7-6630-49a8-bdc5-5e875feecb7f-rootfs\") pod \"machine-config-daemon-vn2n6\" (UID: \"354355f7-6630-49a8-bdc5-5e875feecb7f\") " pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.797092 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/354355f7-6630-49a8-bdc5-5e875feecb7f-mcd-auth-proxy-config\") pod \"machine-config-daemon-vn2n6\" (UID: \"354355f7-6630-49a8-bdc5-5e875feecb7f\") " pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.797111 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e-host-var-lib-cni-bin\") pod \"multus-pqlx4\" (UID: \"78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e\") " pod="openshift-multus/multus-pqlx4" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.797127 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e-host-run-multus-certs\") pod \"multus-pqlx4\" (UID: \"78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e\") " pod="openshift-multus/multus-pqlx4" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.797143 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/709b4982-f2e6-4692-ab1a-c1d5b7d507ad-cnibin\") pod \"multus-additional-cni-plugins-86bpd\" (UID: \"709b4982-f2e6-4692-ab1a-c1d5b7d507ad\") " pod="openshift-multus/multus-additional-cni-plugins-86bpd" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.797160 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: 
\"kubernetes.io/host-path/50684108-04fc-405c-82be-d21d16cd650b-host-kubelet\") pod \"ovnkube-node-m7n89\" (UID: \"50684108-04fc-405c-82be-d21d16cd650b\") " pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.797177 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/50684108-04fc-405c-82be-d21d16cd650b-ovnkube-config\") pod \"ovnkube-node-m7n89\" (UID: \"50684108-04fc-405c-82be-d21d16cd650b\") " pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.797196 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e-cnibin\") pod \"multus-pqlx4\" (UID: \"78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e\") " pod="openshift-multus/multus-pqlx4" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.797214 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e-etc-kubernetes\") pod \"multus-pqlx4\" (UID: \"78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e\") " pod="openshift-multus/multus-pqlx4" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.797232 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/50684108-04fc-405c-82be-d21d16cd650b-ovn-node-metrics-cert\") pod \"ovnkube-node-m7n89\" (UID: \"50684108-04fc-405c-82be-d21d16cd650b\") " pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.797247 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9ddcs\" (UniqueName: \"kubernetes.io/projected/50684108-04fc-405c-82be-d21d16cd650b-kube-api-access-9ddcs\") pod \"ovnkube-node-m7n89\" (UID: \"50684108-04fc-405c-82be-d21d16cd650b\") " pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.797263 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/50684108-04fc-405c-82be-d21d16cd650b-systemd-units\") pod \"ovnkube-node-m7n89\" (UID: \"50684108-04fc-405c-82be-d21d16cd650b\") " pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.797278 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/50684108-04fc-405c-82be-d21d16cd650b-host-run-netns\") pod \"ovnkube-node-m7n89\" (UID: \"50684108-04fc-405c-82be-d21d16cd650b\") " pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.797294 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/50684108-04fc-405c-82be-d21d16cd650b-run-systemd\") pod \"ovnkube-node-m7n89\" (UID: \"50684108-04fc-405c-82be-d21d16cd650b\") " pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.797309 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/50684108-04fc-405c-82be-d21d16cd650b-host-cni-bin\") pod \"ovnkube-node-m7n89\" (UID: 
\"50684108-04fc-405c-82be-d21d16cd650b\") " pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.797324 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e-multus-daemon-config\") pod \"multus-pqlx4\" (UID: \"78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e\") " pod="openshift-multus/multus-pqlx4" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.797342 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/50684108-04fc-405c-82be-d21d16cd650b-etc-openvswitch\") pod \"ovnkube-node-m7n89\" (UID: \"50684108-04fc-405c-82be-d21d16cd650b\") " pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.797357 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e-host-run-k8s-cni-cncf-io\") pod \"multus-pqlx4\" (UID: \"78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e\") " pod="openshift-multus/multus-pqlx4" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.797379 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/709b4982-f2e6-4692-ab1a-c1d5b7d507ad-os-release\") pod \"multus-additional-cni-plugins-86bpd\" (UID: \"709b4982-f2e6-4692-ab1a-c1d5b7d507ad\") " pod="openshift-multus/multus-additional-cni-plugins-86bpd" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.797413 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e-multus-cni-dir\") pod \"multus-pqlx4\" (UID: \"78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e\") " pod="openshift-multus/multus-pqlx4" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.797430 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e-cni-binary-copy\") pod \"multus-pqlx4\" (UID: \"78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e\") " pod="openshift-multus/multus-pqlx4" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.797445 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e-host-var-lib-kubelet\") pod \"multus-pqlx4\" (UID: \"78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e\") " pod="openshift-multus/multus-pqlx4" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.797466 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/709b4982-f2e6-4692-ab1a-c1d5b7d507ad-cni-binary-copy\") pod \"multus-additional-cni-plugins-86bpd\" (UID: \"709b4982-f2e6-4692-ab1a-c1d5b7d507ad\") " pod="openshift-multus/multus-additional-cni-plugins-86bpd" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.797482 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/50684108-04fc-405c-82be-d21d16cd650b-host-slash\") pod \"ovnkube-node-m7n89\" (UID: \"50684108-04fc-405c-82be-d21d16cd650b\") " pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" Dec 10 12:57:08 
crc kubenswrapper[4921]: I1210 12:57:08.797498 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e-hostroot\") pod \"multus-pqlx4\" (UID: \"78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e\") " pod="openshift-multus/multus-pqlx4" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.797524 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/709b4982-f2e6-4692-ab1a-c1d5b7d507ad-system-cni-dir\") pod \"multus-additional-cni-plugins-86bpd\" (UID: \"709b4982-f2e6-4692-ab1a-c1d5b7d507ad\") " pod="openshift-multus/multus-additional-cni-plugins-86bpd" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.797543 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z76j8\" (UniqueName: \"kubernetes.io/projected/709b4982-f2e6-4692-ab1a-c1d5b7d507ad-kube-api-access-z76j8\") pod \"multus-additional-cni-plugins-86bpd\" (UID: \"709b4982-f2e6-4692-ab1a-c1d5b7d507ad\") " pod="openshift-multus/multus-additional-cni-plugins-86bpd" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.797560 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/50684108-04fc-405c-82be-d21d16cd650b-host-run-ovn-kubernetes\") pod \"ovnkube-node-m7n89\" (UID: \"50684108-04fc-405c-82be-d21d16cd650b\") " pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.797577 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dbm9s\" (UniqueName: \"kubernetes.io/projected/354355f7-6630-49a8-bdc5-5e875feecb7f-kube-api-access-dbm9s\") pod \"machine-config-daemon-vn2n6\" (UID: \"354355f7-6630-49a8-bdc5-5e875feecb7f\") " pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.797597 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/50684108-04fc-405c-82be-d21d16cd650b-run-openvswitch\") pod \"ovnkube-node-m7n89\" (UID: \"50684108-04fc-405c-82be-d21d16cd650b\") " pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.797613 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/50684108-04fc-405c-82be-d21d16cd650b-host-cni-netd\") pod \"ovnkube-node-m7n89\" (UID: \"50684108-04fc-405c-82be-d21d16cd650b\") " pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.797637 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/709b4982-f2e6-4692-ab1a-c1d5b7d507ad-tuning-conf-dir\") pod \"multus-additional-cni-plugins-86bpd\" (UID: \"709b4982-f2e6-4692-ab1a-c1d5b7d507ad\") " pod="openshift-multus/multus-additional-cni-plugins-86bpd" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.797638 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/50684108-04fc-405c-82be-d21d16cd650b-env-overrides\") pod \"ovnkube-node-m7n89\" (UID: \"50684108-04fc-405c-82be-d21d16cd650b\") " pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" Dec 
10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.797668 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/50684108-04fc-405c-82be-d21d16cd650b-node-log\") pod \"ovnkube-node-m7n89\" (UID: \"50684108-04fc-405c-82be-d21d16cd650b\") " pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.797733 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/50684108-04fc-405c-82be-d21d16cd650b-node-log\") pod \"ovnkube-node-m7n89\" (UID: \"50684108-04fc-405c-82be-d21d16cd650b\") " pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.797749 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e-os-release\") pod \"multus-pqlx4\" (UID: \"78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e\") " pod="openshift-multus/multus-pqlx4" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.797776 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/50684108-04fc-405c-82be-d21d16cd650b-var-lib-openvswitch\") pod \"ovnkube-node-m7n89\" (UID: \"50684108-04fc-405c-82be-d21d16cd650b\") " pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.797793 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e-system-cni-dir\") pod \"multus-pqlx4\" (UID: \"78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e\") " pod="openshift-multus/multus-pqlx4" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.797798 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/50684108-04fc-405c-82be-d21d16cd650b-ovnkube-script-lib\") pod \"ovnkube-node-m7n89\" (UID: \"50684108-04fc-405c-82be-d21d16cd650b\") " pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.797817 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e-host-var-lib-cni-multus\") pod \"multus-pqlx4\" (UID: \"78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e\") " pod="openshift-multus/multus-pqlx4" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.797821 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/354355f7-6630-49a8-bdc5-5e875feecb7f-proxy-tls\") pod \"machine-config-daemon-vn2n6\" (UID: \"354355f7-6630-49a8-bdc5-5e875feecb7f\") " pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.798278 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/50684108-04fc-405c-82be-d21d16cd650b-var-lib-openvswitch\") pod \"ovnkube-node-m7n89\" (UID: \"50684108-04fc-405c-82be-d21d16cd650b\") " pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.798366 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: 
\"kubernetes.io/host-path/78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e-os-release\") pod \"multus-pqlx4\" (UID: \"78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e\") " pod="openshift-multus/multus-pqlx4" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.798428 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/50684108-04fc-405c-82be-d21d16cd650b-host-run-netns\") pod \"ovnkube-node-m7n89\" (UID: \"50684108-04fc-405c-82be-d21d16cd650b\") " pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.798471 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/709b4982-f2e6-4692-ab1a-c1d5b7d507ad-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-86bpd\" (UID: \"709b4982-f2e6-4692-ab1a-c1d5b7d507ad\") " pod="openshift-multus/multus-additional-cni-plugins-86bpd" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.798473 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/50684108-04fc-405c-82be-d21d16cd650b-run-ovn\") pod \"ovnkube-node-m7n89\" (UID: \"50684108-04fc-405c-82be-d21d16cd650b\") " pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.798511 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/50684108-04fc-405c-82be-d21d16cd650b-run-systemd\") pod \"ovnkube-node-m7n89\" (UID: \"50684108-04fc-405c-82be-d21d16cd650b\") " pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.798523 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e-host-run-netns\") pod \"multus-pqlx4\" (UID: \"78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e\") " pod="openshift-multus/multus-pqlx4" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.798537 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/50684108-04fc-405c-82be-d21d16cd650b-host-cni-bin\") pod \"ovnkube-node-m7n89\" (UID: \"50684108-04fc-405c-82be-d21d16cd650b\") " pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.798569 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e-multus-conf-dir\") pod \"multus-pqlx4\" (UID: \"78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e\") " pod="openshift-multus/multus-pqlx4" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.798909 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/50684108-04fc-405c-82be-d21d16cd650b-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-m7n89\" (UID: \"50684108-04fc-405c-82be-d21d16cd650b\") " pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.798979 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e-multus-socket-dir-parent\") pod \"multus-pqlx4\" (UID: 
\"78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e\") " pod="openshift-multus/multus-pqlx4" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.799023 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/50684108-04fc-405c-82be-d21d16cd650b-log-socket\") pod \"ovnkube-node-m7n89\" (UID: \"50684108-04fc-405c-82be-d21d16cd650b\") " pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.799060 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/354355f7-6630-49a8-bdc5-5e875feecb7f-rootfs\") pod \"machine-config-daemon-vn2n6\" (UID: \"354355f7-6630-49a8-bdc5-5e875feecb7f\") " pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.799064 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e-multus-daemon-config\") pod \"multus-pqlx4\" (UID: \"78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e\") " pod="openshift-multus/multus-pqlx4" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.799103 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/50684108-04fc-405c-82be-d21d16cd650b-etc-openvswitch\") pod \"ovnkube-node-m7n89\" (UID: \"50684108-04fc-405c-82be-d21d16cd650b\") " pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.799131 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e-host-run-k8s-cni-cncf-io\") pod \"multus-pqlx4\" (UID: \"78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e\") " pod="openshift-multus/multus-pqlx4" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.799287 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/709b4982-f2e6-4692-ab1a-c1d5b7d507ad-os-release\") pod \"multus-additional-cni-plugins-86bpd\" (UID: \"709b4982-f2e6-4692-ab1a-c1d5b7d507ad\") " pod="openshift-multus/multus-additional-cni-plugins-86bpd" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.799354 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/50684108-04fc-405c-82be-d21d16cd650b-ovnkube-script-lib\") pod \"ovnkube-node-m7n89\" (UID: \"50684108-04fc-405c-82be-d21d16cd650b\") " pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.799811 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/354355f7-6630-49a8-bdc5-5e875feecb7f-mcd-auth-proxy-config\") pod \"machine-config-daemon-vn2n6\" (UID: \"354355f7-6630-49a8-bdc5-5e875feecb7f\") " pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.799823 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/50684108-04fc-405c-82be-d21d16cd650b-ovnkube-config\") pod \"ovnkube-node-m7n89\" (UID: \"50684108-04fc-405c-82be-d21d16cd650b\") " pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" Dec 10 12:57:08 crc 
kubenswrapper[4921]: I1210 12:57:08.799851 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e-host-var-lib-cni-bin\") pod \"multus-pqlx4\" (UID: \"78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e\") " pod="openshift-multus/multus-pqlx4" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.799874 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e-host-run-multus-certs\") pod \"multus-pqlx4\" (UID: \"78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e\") " pod="openshift-multus/multus-pqlx4" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.799906 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/709b4982-f2e6-4692-ab1a-c1d5b7d507ad-cnibin\") pod \"multus-additional-cni-plugins-86bpd\" (UID: \"709b4982-f2e6-4692-ab1a-c1d5b7d507ad\") " pod="openshift-multus/multus-additional-cni-plugins-86bpd" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.799929 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/50684108-04fc-405c-82be-d21d16cd650b-host-kubelet\") pod \"ovnkube-node-m7n89\" (UID: \"50684108-04fc-405c-82be-d21d16cd650b\") " pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.799967 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e-cnibin\") pod \"multus-pqlx4\" (UID: \"78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e\") " pod="openshift-multus/multus-pqlx4" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.799991 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/50684108-04fc-405c-82be-d21d16cd650b-systemd-units\") pod \"ovnkube-node-m7n89\" (UID: \"50684108-04fc-405c-82be-d21d16cd650b\") " pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.800015 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e-etc-kubernetes\") pod \"multus-pqlx4\" (UID: \"78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e\") " pod="openshift-multus/multus-pqlx4" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.800040 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/709b4982-f2e6-4692-ab1a-c1d5b7d507ad-system-cni-dir\") pod \"multus-additional-cni-plugins-86bpd\" (UID: \"709b4982-f2e6-4692-ab1a-c1d5b7d507ad\") " pod="openshift-multus/multus-additional-cni-plugins-86bpd" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.800106 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/50684108-04fc-405c-82be-d21d16cd650b-host-cni-netd\") pod \"ovnkube-node-m7n89\" (UID: \"50684108-04fc-405c-82be-d21d16cd650b\") " pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.800152 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: 
\"kubernetes.io/host-path/50684108-04fc-405c-82be-d21d16cd650b-run-openvswitch\") pod \"ovnkube-node-m7n89\" (UID: \"50684108-04fc-405c-82be-d21d16cd650b\") " pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.800152 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e-multus-cni-dir\") pod \"multus-pqlx4\" (UID: \"78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e\") " pod="openshift-multus/multus-pqlx4" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.800636 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e-cni-binary-copy\") pod \"multus-pqlx4\" (UID: \"78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e\") " pod="openshift-multus/multus-pqlx4" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.800640 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/50684108-04fc-405c-82be-d21d16cd650b-host-run-ovn-kubernetes\") pod \"ovnkube-node-m7n89\" (UID: \"50684108-04fc-405c-82be-d21d16cd650b\") " pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.800682 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/50684108-04fc-405c-82be-d21d16cd650b-host-slash\") pod \"ovnkube-node-m7n89\" (UID: \"50684108-04fc-405c-82be-d21d16cd650b\") " pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.800697 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e-hostroot\") pod \"multus-pqlx4\" (UID: \"78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e\") " pod="openshift-multus/multus-pqlx4" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.800758 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e-host-var-lib-kubelet\") pod \"multus-pqlx4\" (UID: \"78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e\") " pod="openshift-multus/multus-pqlx4" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.800790 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/709b4982-f2e6-4692-ab1a-c1d5b7d507ad-cni-binary-copy\") pod \"multus-additional-cni-plugins-86bpd\" (UID: \"709b4982-f2e6-4692-ab1a-c1d5b7d507ad\") " pod="openshift-multus/multus-additional-cni-plugins-86bpd" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.801918 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/709b4982-f2e6-4692-ab1a-c1d5b7d507ad-tuning-conf-dir\") pod \"multus-additional-cni-plugins-86bpd\" (UID: \"709b4982-f2e6-4692-ab1a-c1d5b7d507ad\") " pod="openshift-multus/multus-additional-cni-plugins-86bpd" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.803760 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/50684108-04fc-405c-82be-d21d16cd650b-ovn-node-metrics-cert\") pod \"ovnkube-node-m7n89\" (UID: \"50684108-04fc-405c-82be-d21d16cd650b\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.804193 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/354355f7-6630-49a8-bdc5-5e875feecb7f-proxy-tls\") pod \"machine-config-daemon-vn2n6\" (UID: \"354355f7-6630-49a8-bdc5-5e875feecb7f\") " pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.810776 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bd245e67c99943297f64701eba8772143dc206caf67849eaf2f9a8e82dab0d26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:08Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.819623 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9ddcs\" (UniqueName: \"kubernetes.io/projected/50684108-04fc-405c-82be-d21d16cd650b-kube-api-access-9ddcs\") pod \"ovnkube-node-m7n89\" (UID: \"50684108-04fc-405c-82be-d21d16cd650b\") " pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.822875 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z76j8\" (UniqueName: \"kubernetes.io/projected/709b4982-f2e6-4692-ab1a-c1d5b7d507ad-kube-api-access-z76j8\") pod \"multus-additional-cni-plugins-86bpd\" (UID: \"709b4982-f2e6-4692-ab1a-c1d5b7d507ad\") " pod="openshift-multus/multus-additional-cni-plugins-86bpd" Dec 10 
12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.823724 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lhs2m\" (UniqueName: \"kubernetes.io/projected/78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e-kube-api-access-lhs2m\") pod \"multus-pqlx4\" (UID: \"78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e\") " pod="openshift-multus/multus-pqlx4" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.824578 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.824625 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.824651 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.824674 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.824689 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:08Z","lastTransitionTime":"2025-12-10T12:57:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.827133 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dbm9s\" (UniqueName: \"kubernetes.io/projected/354355f7-6630-49a8-bdc5-5e875feecb7f-kube-api-access-dbm9s\") pod \"machine-config-daemon-vn2n6\" (UID: \"354355f7-6630-49a8-bdc5-5e875feecb7f\") " pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.833212 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"371fafdc-aa16-4608-aaa2-e419c4ddbc18\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b9a190a657ca03f3fb08626b7af512164ff131b1783b903a02005a111a7036c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57991b0cb6fd4b37082ff5d4eecc6227d77f241e9a983cd3e0eb9db5b485865f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c24d974446ee70bf587bf3969542cda98f062a9cc78b6af73005d9b8d0a6ee02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5a3f231014293fc0412e577cf9840f62f8db86
9ea4f0f8bef1bfc5112b38cf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://17a6158acd097054719316d2ad29dc036546d3951bb1e8dd010618f9155270a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://534968b5f5d9e7b3063c91a3e0b68ba04d83e2cb65ab688b23d284adc6852155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://534968b5f5d9e7b3063c91a3e0b68ba04d83e2cb65ab688b23d284adc6852155\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0acb3ca5fa3945c89412f466b00193354c94ce56dbba608c104d3baf555a2c3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0acb3ca5fa3945c89412f466b00193354c94ce56dbba608c104d3baf555a2c3a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b7d1b714acf0f278cc0310204225d417266a241f1ea827dc625f7b89a7d0ebac\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b7d1b714acf0f278cc0310204225d417266a241f1ea827dc625f7b89a7d0ebac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:08Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.848918 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02726135-3050-46a1-a3ab-b2ce46cdb75d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12292f0529bcf32fb33e5accfbd0dfd7d53e377a9ee2046d4ca6efc78fe1c31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a4716beddbcd24e8418830aa5494cffffc21272e45e30bd15cfe58bfc07c543\\\",\\\"image\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f66fe2144cde40619405c04d7d83cbcc2e78503401df428502abad1682d4cb7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4244835c7f038a7c1bf4820de49854350a23fac13c5a252a1553f6508594f10e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:08Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.861875 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bd245e67c99943297f64701eba8772143dc206caf67849eaf2f9a8e82dab0d26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:08Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.878658 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2789e9cd1bca4abecf0939aad4a5f63bdc250a525ad3664bc2440e8b0b7a834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:08Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.897930 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zmks6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f2626c5-78df-45d2-8970-c4f99790a0fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ft9kj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zmks6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:08Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.910083 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pqlx4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lhs2m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pqlx4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:08Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.926883 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.926918 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.926927 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.926942 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.926977 4921 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:08Z","lastTransitionTime":"2025-12-10T12:57:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.927978 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50684108-04fc-405c-82be-d21d16cd650b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-m7n89\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:08Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.944676 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-86bpd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"709b4982-f2e6-4692-ab1a-c1d5b7d507ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"
name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\
\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-86bpd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:08Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.963761 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"371fafdc-aa16-4608-aaa2-e419c4ddbc18\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b9a190a657ca03f3fb08626b7af512164ff131b1783b903a02005a111a7036c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57991b0cb6fd4b37082ff5d4eecc6227d77f241e9a983cd3e0eb9db5b485865f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\
"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c24d974446ee70bf587bf3969542cda98f062a9cc78b6af73005d9b8d0a6ee02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5a3f231014293fc0412e577cf9840f62f8db869ea4f0f8bef1bfc5112b38cf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://17a6158acd097054719316d2ad29dc036546d3951bb1e8dd010618f9155270a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://534968b5f5d9e7b3063c91a3e0b68ba04d83e2cb65ab688b23d284adc6852155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://534968b5f5d9e7b3063c91a3e0b68ba04d83e2cb65ab688b23d284adc6852155\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10
T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0acb3ca5fa3945c89412f466b00193354c94ce56dbba608c104d3baf555a2c3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0acb3ca5fa3945c89412f466b00193354c94ce56dbba608c104d3baf555a2c3a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b7d1b714acf0f278cc0310204225d417266a241f1ea827dc625f7b89a7d0ebac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b7d1b714acf0f278cc0310204225d417266a241f1ea827dc625f7b89a7d0ebac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:08Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.975998 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
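[editorial aside] The patch bodies quoted in these entries are strategic merge patches against the pod "status" subresource; the "$setElementOrder/conditions" key is the strategic-merge directive that pins the ordering of the conditions list, which is merged by its "type" key. Below is a sketch, assuming client-go and a reachable kubeconfig, of how such a patch is submitted; it is not the kubelet's internal status_manager code, only an illustration of the request shape that the webhook intercepts. Pod name and namespace are copied from the multus entry above.

// patchstatus.go - sketch of a strategic-merge patch to a pod status
// subresource, the request type failing throughout this log. With the
// webhook's expired certificate, the call returns: "Internal error
// occurred: failed calling webhook ... x509: certificate has expired".
package main

import (
	"context"
	"log"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/apimachinery/pkg/types"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
	if err != nil {
		log.Fatal(err)
	}
	cs, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		log.Fatal(err)
	}
	// Trimmed-down version of the patch bodies quoted in the log.
	patch := []byte(`{"status":{` +
		`"$setElementOrder/conditions":[{"type":"Initialized"},{"type":"Ready"}],` +
		`"conditions":[{"type":"Ready","status":"False","reason":"ContainersNotReady"}]}}`)
	_, err = cs.CoreV1().Pods("openshift-multus").Patch(
		context.TODO(), "multus-pqlx4",
		types.StrategicMergePatchType, patch,
		metav1.PatchOptions{}, "status",
	)
	if err != nil {
		log.Fatal(err)
	}
}

[end aside; log continues]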
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"02726135-3050-46a1-a3ab-b2ce46cdb75d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12292f0529bcf32fb33e5accfbd0dfd7d53e377a9ee2046d4ca6efc78fe1c31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a4716beddbcd24e8418830aa5494cffffc21272e45e30bd15cfe58bfc07c543\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f66fe2144cde40619405c04d7d83cbcc2e78503401df428502abad1682d4cb7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4244835c7f038a7c1bf4820de49854350a23fac13c5a252a1553f6508594f10e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:08Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.992085 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:08Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:08 crc kubenswrapper[4921]: I1210 12:57:08.993161 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" Dec 10 12:57:09 crc kubenswrapper[4921]: I1210 12:57:09.002273 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-86bpd" Dec 10 12:57:09 crc kubenswrapper[4921]: I1210 12:57:09.010001 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://307b845aae3352df08e2f9fd394f4110a37b2a21650593ebb584c5bf37d01397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3be8a498516e12174c8b5612669fd69deef610c01ed9884a5228cd436bbae3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d7732574
53265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:09Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:09 crc kubenswrapper[4921]: I1210 12:57:09.010175 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-pqlx4" Dec 10 12:57:09 crc kubenswrapper[4921]: I1210 12:57:09.019975 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" Dec 10 12:57:09 crc kubenswrapper[4921]: I1210 12:57:09.034913 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:09 crc kubenswrapper[4921]: I1210 12:57:09.035000 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:09 crc kubenswrapper[4921]: I1210 12:57:09.035018 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:09 crc kubenswrapper[4921]: I1210 12:57:09.035046 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:09 crc kubenswrapper[4921]: I1210 12:57:09.035072 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:09Z","lastTransitionTime":"2025-12-10T12:57:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
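[editorial aside] Note that the network-node-identity-vrzqb pod itself reports both its approver and webhook containers as Running, so the webhook endpoint is up; the failure is purely the certificate's validity window. A sketch of surfacing the same error by dialing the endpoint named in the log follows; it assumes the caller trusts the webhook's signing CA (as the kubelet does), since with an untrusted self-signed certificate Go would instead report an unknown-authority error before the expiry check.

// dialwebhook.go - sketch reproducing the failing handshake against the
// endpoint from the log (https://127.0.0.1:9743). With ordinary
// verification and an expired serving cert, the handshake fails with the
// same x509 expiry error the kubelet reports on every status patch.
package main

import (
	"crypto/tls"
	"fmt"
)

func main() {
	conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{})
	if err != nil {
		// Expected: "x509: certificate has expired or is not yet valid"
		// (or an authority error, depending on trust configuration).
		fmt.Println("handshake failed:", err)
		return
	}
	defer conn.Close()
	for _, cert := range conn.ConnectionState().PeerCertificates {
		fmt.Printf("peer cert %q valid until %s\n",
			cert.Subject.CommonName, cert.NotAfter)
	}
}

[end aside; log continues]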
Has your network provider started?"} Dec 10 12:57:09 crc kubenswrapper[4921]: I1210 12:57:09.038011 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f57208b0-80bc-4c1b-bbab-9d2f858972f6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0534394a39803e8a7555e29d0770b5ac7f9197a5f0e03bec4c5460d77fffdd14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6eaca0cb438e61f0856ed7dc64256ccd02aee8dac014d1f5e9cd8aa180c736fb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://692a4c4828dc74b1bfb948f58fab96ee6674030cb9009c72f30f9eae482eb682\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f534d6390920d177e185001b28f7ece42d82a0da922b4aaf174c271dbe975c50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b39874b20cdccc7903753342421a1f7e13b7e99a2cb699a7c0e44226aebd4f4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"message\\\":\\\"et denominator for mutating requests\\\\\\\" limit=200\\\\nI1210 12:57:01.294872 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1210 12:57:01.294893 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1210 12:57:01.294918 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 12:57:01.294926 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 12:57:01.294932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1210 12:57:01.294934 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1210 12:57:01.294938 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1210 12:57:01.294941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1210 12:57:01.301734 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2446357718/tls.crt::/tmp/serving-cert-2446357718/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1765371405\\\\\\\\\\\\\\\" (2025-12-10 12:56:44 +0000 UTC to 2026-01-09 12:56:45 +0000 UTC (now=2025-12-10 12:57:01.30169166 +0000 UTC))\\\\\\\"\\\\nI1210 12:57:01.301889 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1765371416\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1765371416\\\\\\\\\\\\\\\" (2025-12-10 11:56:55 +0000 UTC to 2026-12-10 11:56:55 +0000 UTC (now=2025-12-10 12:57:01.301865574 +0000 UTC))\\\\\\\"\\\\nI1210 12:57:01.301907 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1210 12:57:01.301934 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nF1210 12:57:01.302850 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e5afbcb1ea81c3f9ec4152ef614a3f07ba1ded75c774c467e968f9c3ee72e33\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bee74fc4c681cc10c5a460c807659272e393e19173109e82ef65371c5b363ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bee74fc4c681cc10c5a460c807659272e393e19173109e82ef65371c5b363ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:09Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:09 crc kubenswrapper[4921]: I1210 12:57:09.061926 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:09Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:09 crc kubenswrapper[4921]: I1210 12:57:09.077102 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:09Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:09 crc kubenswrapper[4921]: I1210 12:57:09.089646 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"354355f7-6630-49a8-bdc5-5e875feecb7f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dbm9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dbm9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-vn2n6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:09Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:09 crc kubenswrapper[4921]: I1210 12:57:09.138980 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:09 crc kubenswrapper[4921]: I1210 12:57:09.139025 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:09 crc kubenswrapper[4921]: I1210 12:57:09.139034 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:09 crc kubenswrapper[4921]: I1210 12:57:09.139050 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:09 crc kubenswrapper[4921]: I1210 12:57:09.139059 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:09Z","lastTransitionTime":"2025-12-10T12:57:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:09 crc kubenswrapper[4921]: I1210 12:57:09.242626 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:09 crc kubenswrapper[4921]: I1210 12:57:09.242674 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:09 crc kubenswrapper[4921]: I1210 12:57:09.242683 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:09 crc kubenswrapper[4921]: I1210 12:57:09.242701 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:09 crc kubenswrapper[4921]: I1210 12:57:09.242712 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:09Z","lastTransitionTime":"2025-12-10T12:57:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:09 crc kubenswrapper[4921]: I1210 12:57:09.316579 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Dec 10 12:57:09 crc kubenswrapper[4921]: I1210 12:57:09.345626 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:09 crc kubenswrapper[4921]: I1210 12:57:09.345685 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:09 crc kubenswrapper[4921]: I1210 12:57:09.345700 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:09 crc kubenswrapper[4921]: I1210 12:57:09.345718 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:09 crc kubenswrapper[4921]: I1210 12:57:09.345730 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:09Z","lastTransitionTime":"2025-12-10T12:57:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:09 crc kubenswrapper[4921]: I1210 12:57:09.385610 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-86bpd" event={"ID":"709b4982-f2e6-4692-ab1a-c1d5b7d507ad","Type":"ContainerStarted","Data":"34d9e720fab0818e4cdf1e2a4da042a5648c7c396fedf17b395ad07ececd5c9c"} Dec 10 12:57:09 crc kubenswrapper[4921]: I1210 12:57:09.385667 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-86bpd" event={"ID":"709b4982-f2e6-4692-ab1a-c1d5b7d507ad","Type":"ContainerStarted","Data":"b25329b870ce760f2c2419fe33d339f6021d610be936bf3e7935d2f5b9aa0775"} Dec 10 12:57:09 crc kubenswrapper[4921]: I1210 12:57:09.387855 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-pqlx4" event={"ID":"78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e","Type":"ContainerStarted","Data":"480da3b2621712c4562f9423dc98fdbf17a9dc45365f129777611bc7e934c709"} Dec 10 12:57:09 crc kubenswrapper[4921]: I1210 12:57:09.387906 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-pqlx4" event={"ID":"78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e","Type":"ContainerStarted","Data":"a3c509f51c9aece42c58478ae50a7d6d2895439fd7ca8980ef81ef95984f9c02"} Dec 10 12:57:09 crc kubenswrapper[4921]: I1210 12:57:09.389700 4921 generic.go:334] "Generic (PLEG): container finished" podID="50684108-04fc-405c-82be-d21d16cd650b" containerID="34932b230bb26e6c4b1bdf433827ce608df8658f6fb76140a4f0ac680dc1d43f" exitCode=0 Dec 10 12:57:09 crc kubenswrapper[4921]: I1210 12:57:09.389773 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" event={"ID":"50684108-04fc-405c-82be-d21d16cd650b","Type":"ContainerDied","Data":"34932b230bb26e6c4b1bdf433827ce608df8658f6fb76140a4f0ac680dc1d43f"} Dec 10 12:57:09 crc kubenswrapper[4921]: I1210 12:57:09.389839 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" event={"ID":"50684108-04fc-405c-82be-d21d16cd650b","Type":"ContainerStarted","Data":"0c86cc1337c544dcdb33d7cf0733f5db47dddbe6b25adcaaf3f31fa873f9a175"} Dec 10 12:57:09 crc kubenswrapper[4921]: I1210 12:57:09.392099 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" event={"ID":"354355f7-6630-49a8-bdc5-5e875feecb7f","Type":"ContainerStarted","Data":"22c45fd7d4d0bb91e995e76a0d813660f9b488a4765e3a21eab2485e1ff03ff3"} Dec 10 12:57:09 crc kubenswrapper[4921]: I1210 12:57:09.392151 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" event={"ID":"354355f7-6630-49a8-bdc5-5e875feecb7f","Type":"ContainerStarted","Data":"27975eaa70887a1e6ec3bc21ce170bbe5dfe5a05172264be8c8bd343aea02998"} Dec 10 12:57:09 crc kubenswrapper[4921]: I1210 12:57:09.392164 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" event={"ID":"354355f7-6630-49a8-bdc5-5e875feecb7f","Type":"ContainerStarted","Data":"a53970bbc49505a4ce789cccdd57b56360188ba02a0b069a1bdbf97fef0a616c"} Dec 10 12:57:09 crc kubenswrapper[4921]: I1210 12:57:09.423251 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f57208b0-80bc-4c1b-bbab-9d2f858972f6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0534394a39803e8a7555e29d0770b5ac7f9197a5f0e03bec4c5460d77fffdd14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6eaca0cb438e61f0856ed7dc64256ccd02aee8dac014d1f5e9cd8aa180c736fb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://692a4c4828dc74b1bfb948f58fab96ee6674030cb9009c72f30f9eae482eb682\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f534d6390920d177e185001b28f7ece42d82a0da922b4aaf174c271dbe975c50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b39874b20cdccc7903753342421a1f7e13b7e99a2cb699a7c0e44226aebd4f4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"message\\\":\\\"et denominator for mutating requests\\\\\\\" limit=200\\\\nI1210 12:57:01.294872 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1210 12:57:01.294893 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1210 12:57:01.294918 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 12:57:01.294926 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 12:57:01.294932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1210 12:57:01.294934 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1210 12:57:01.294938 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1210 12:57:01.294941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1210 12:57:01.301734 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2446357718/tls.crt::/tmp/serving-cert-2446357718/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1765371405\\\\\\\\\\\\\\\" (2025-12-10 12:56:44 +0000 UTC to 2026-01-09 12:56:45 +0000 UTC (now=2025-12-10 12:57:01.30169166 +0000 UTC))\\\\\\\"\\\\nI1210 12:57:01.301889 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1765371416\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1765371416\\\\\\\\\\\\\\\" (2025-12-10 11:56:55 +0000 UTC to 2026-12-10 11:56:55 +0000 UTC (now=2025-12-10 12:57:01.301865574 +0000 UTC))\\\\\\\"\\\\nI1210 12:57:01.301907 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1210 12:57:01.301934 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nF1210 12:57:01.302850 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e5afbcb1ea81c3f9ec4152ef614a3f07ba1ded75c774c467e968f9c3ee72e33\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bee74fc4c681cc10c5a460c807659272e393e19173109e82ef65371c5b363ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bee74fc4c681cc10c5a460c807659272e393e19173109e82ef65371c5b363ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:09Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:09 crc kubenswrapper[4921]: I1210 12:57:09.448483 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:09 crc kubenswrapper[4921]: I1210 12:57:09.448531 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:09 crc kubenswrapper[4921]: I1210 12:57:09.448559 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:09 crc kubenswrapper[4921]: I1210 12:57:09.448576 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:09 crc kubenswrapper[4921]: I1210 12:57:09.448587 4921 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:09Z","lastTransitionTime":"2025-12-10T12:57:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:09 crc kubenswrapper[4921]: I1210 12:57:09.464285 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:09Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:09 crc kubenswrapper[4921]: I1210 12:57:09.501958 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:09Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:09 crc kubenswrapper[4921]: I1210 12:57:09.535896 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"354355f7-6630-49a8-bdc5-5e875feecb7f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dbm9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dbm9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-vn2n6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:09Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:09 crc kubenswrapper[4921]: I1210 12:57:09.550895 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:09 crc kubenswrapper[4921]: I1210 12:57:09.550961 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:09 crc kubenswrapper[4921]: I1210 12:57:09.550977 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:09 crc kubenswrapper[4921]: I1210 12:57:09.550998 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:09 crc kubenswrapper[4921]: I1210 12:57:09.551012 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:09Z","lastTransitionTime":"2025-12-10T12:57:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:09 crc kubenswrapper[4921]: I1210 12:57:09.572172 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bd245e67c99943297f64701eba8772143dc206caf67849eaf2f9a8e82dab0d26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:09Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:09 crc kubenswrapper[4921]: I1210 12:57:09.593751 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50684108-04fc-405c-82be-d21d16cd650b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":
\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-m7n89\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:09Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:09 crc kubenswrapper[4921]: 
I1210 12:57:09.615797 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2789e9cd1bca4abecf0939aad4a5f63bdc250a525ad3664bc2440e8b0b7a834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:09Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:09 crc kubenswrapper[4921]: I1210 12:57:09.630807 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zmks6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f2626c5-78df-45d2-8970-c4f99790a0fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ft9kj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zmks6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:09Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:09 crc kubenswrapper[4921]: I1210 12:57:09.644428 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Dec 10 12:57:09 crc kubenswrapper[4921]: I1210 12:57:09.647860 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pqlx4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lhs2m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pqlx4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:09Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:09 crc kubenswrapper[4921]: I1210 12:57:09.654186 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ft9kj\" (UniqueName: \"kubernetes.io/projected/1f2626c5-78df-45d2-8970-c4f99790a0fb-kube-api-access-ft9kj\") pod \"node-resolver-zmks6\" (UID: \"1f2626c5-78df-45d2-8970-c4f99790a0fb\") " pod="openshift-dns/node-resolver-zmks6" Dec 10 12:57:09 crc kubenswrapper[4921]: I1210 12:57:09.654333 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:09 crc kubenswrapper[4921]: I1210 12:57:09.654351 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:09 crc kubenswrapper[4921]: I1210 12:57:09.654361 4921 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:09 crc kubenswrapper[4921]: I1210 12:57:09.654376 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:09 crc kubenswrapper[4921]: I1210 12:57:09.654400 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:09Z","lastTransitionTime":"2025-12-10T12:57:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:09 crc kubenswrapper[4921]: I1210 12:57:09.662356 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://307b845aae3352df08e2f9fd394f4110a37b2a21650593ebb584c5bf37d01397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3be8a498516e12174c8b5612669fd69deef610c01ed9884a5228cd436bbae3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\
\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:09Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:09 crc kubenswrapper[4921]: I1210 12:57:09.676766 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-86bpd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"709b4982-f2e6-4692-ab1a-c1d5b7d507ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34d9e720fab0818e4cdf1e2a4da042a5648c7c396fedf17b395ad07ececd5c9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"}
,{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-86bpd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:09Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:09 crc kubenswrapper[4921]: I1210 12:57:09.700042 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"371fafdc-aa16-4608-aaa2-e419c4ddbc18\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b9a190a657ca03f3fb08626b7af512164ff131b1783b903a02005a111a7036c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57991b0cb6fd4b37082ff5d4eecc6227d77f241e9a983cd3e0eb9db5b485865f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c24d974446ee70bf587bf3969542cda98f062a9cc78b6af73005d9b8d0a6ee02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5a3f231014293fc0412e577cf9840f62f8db86
9ea4f0f8bef1bfc5112b38cf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://17a6158acd097054719316d2ad29dc036546d3951bb1e8dd010618f9155270a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://534968b5f5d9e7b3063c91a3e0b68ba04d83e2cb65ab688b23d284adc6852155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://534968b5f5d9e7b3063c91a3e0b68ba04d83e2cb65ab688b23d284adc6852155\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0acb3ca5fa3945c89412f466b00193354c94ce56dbba608c104d3baf555a2c3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0acb3ca5fa3945c89412f466b00193354c94ce56dbba608c104d3baf555a2c3a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b7d1b714acf0f278cc0310204225d417266a241f1ea827dc625f7b89a7d0ebac\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b7d1b714acf0f278cc0310204225d417266a241f1ea827dc625f7b89a7d0ebac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:09Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:09 crc kubenswrapper[4921]: I1210 12:57:09.718246 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02726135-3050-46a1-a3ab-b2ce46cdb75d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12292f0529bcf32fb33e5accfbd0dfd7d53e377a9ee2046d4ca6efc78fe1c31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a4716beddbcd24e8418830aa5494cffffc21272e45e30bd15cfe58bfc07c543\\\",\\\"image\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f66fe2144cde40619405c04d7d83cbcc2e78503401df428502abad1682d4cb7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4244835c7f038a7c1bf4820de49854350a23fac13c5a252a1553f6508594f10e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:09Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:09 crc kubenswrapper[4921]: I1210 12:57:09.733187 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:09Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:09 crc kubenswrapper[4921]: I1210 12:57:09.753263 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"371fafdc-aa16-4608-aaa2-e419c4ddbc18\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b9a190a657ca03f3fb08626b7af512164ff131b1783b903a02005a111a7036c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57991b0cb6fd4b37082ff5d4eecc6227d77f241e9a983cd3e0eb9db5b485865f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c24d974446ee70bf587bf3969542cda98f062a9cc78b6af73005d9b8d0a6ee02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5a3f231014293fc0412e577cf9840f62f8db86
9ea4f0f8bef1bfc5112b38cf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://17a6158acd097054719316d2ad29dc036546d3951bb1e8dd010618f9155270a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://534968b5f5d9e7b3063c91a3e0b68ba04d83e2cb65ab688b23d284adc6852155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://534968b5f5d9e7b3063c91a3e0b68ba04d83e2cb65ab688b23d284adc6852155\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0acb3ca5fa3945c89412f466b00193354c94ce56dbba608c104d3baf555a2c3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0acb3ca5fa3945c89412f466b00193354c94ce56dbba608c104d3baf555a2c3a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b7d1b714acf0f278cc0310204225d417266a241f1ea827dc625f7b89a7d0ebac\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b7d1b714acf0f278cc0310204225d417266a241f1ea827dc625f7b89a7d0ebac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:09Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:09 crc kubenswrapper[4921]: I1210 12:57:09.762238 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:09 crc kubenswrapper[4921]: I1210 12:57:09.762279 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:09 crc kubenswrapper[4921]: I1210 12:57:09.762290 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:09 crc kubenswrapper[4921]: I1210 12:57:09.762305 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:09 crc kubenswrapper[4921]: I1210 12:57:09.762314 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:09Z","lastTransitionTime":"2025-12-10T12:57:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:09 crc kubenswrapper[4921]: I1210 12:57:09.777535 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02726135-3050-46a1-a3ab-b2ce46cdb75d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12292f0529bcf32fb33e5accfbd0dfd7d53e377a9ee2046d4ca6efc78fe1c31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a4716beddbcd24e8418830aa5494cffffc21272e45e30bd15cfe58bfc07c543\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f66fe2144cde40619405c04d7d83cbcc2e78503401df428502abad1682d4cb7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4244835c7f038a7c1bf4820de49854350a23fac13c5a252a1553f6508594f10e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:09Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:09 crc kubenswrapper[4921]: I1210 12:57:09.792998 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:09Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:09 crc kubenswrapper[4921]: I1210 12:57:09.806553 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://307b845aae3352df08e2f9fd394f4110a37b2a21650593ebb584c5bf37d01397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3be8a498516e12174c8b5612669fd69deef610c01ed9884a5228cd436bbae3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:09Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:09 crc kubenswrapper[4921]: I1210 12:57:09.812618 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-zmks6" Dec 10 12:57:09 crc kubenswrapper[4921]: W1210 12:57:09.826163 4921 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1f2626c5_78df_45d2_8970_c4f99790a0fb.slice/crio-22c98a271fece0992dc07e76f11536aa5c48e279cdeb438c9e6f0a60e88d16b0 WatchSource:0}: Error finding container 22c98a271fece0992dc07e76f11536aa5c48e279cdeb438c9e6f0a60e88d16b0: Status 404 returned error can't find the container with id 22c98a271fece0992dc07e76f11536aa5c48e279cdeb438c9e6f0a60e88d16b0 Dec 10 12:57:09 crc kubenswrapper[4921]: I1210 12:57:09.826468 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-86bpd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"709b4982-f2e6-4692-ab1a-c1d5b7d507ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34d9e720fab0818e4cdf1e2a4da042a5648c7c396fedf17b395ad07ececd5c9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"}
,{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-86bpd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:09Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:09 crc kubenswrapper[4921]: I1210 12:57:09.849191 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f57208b0-80bc-4c1b-bbab-9d2f858972f6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0534394a39803e8a7555e29d0770b5ac7f9197a5f0e03bec4c5460d77fffdd14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6eaca0cb438e61f0856ed7dc64256ccd02aee8dac014d1f5e9cd8aa180c736fb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://692a4c4828dc74b1bfb948f58fab96ee6674030cb9009c72f30f9eae482eb682\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f534d6390920d177e185001b28f7ece42d82a0da922b4aaf174c271dbe975c50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b39874b20cdccc7903753342421a1f7e13b7e99a2cb699a7c0e44226aebd4f4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"message\\\":\\\"et denominator for mutating requests\\\\\\\" limit=200\\\\nI1210 12:57:01.294872 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1210 12:57:01.294893 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1210 12:57:01.294918 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 12:57:01.294926 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 12:57:01.294932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1210 12:57:01.294934 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1210 12:57:01.294938 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1210 12:57:01.294941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1210 12:57:01.301734 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2446357718/tls.crt::/tmp/serving-cert-2446357718/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1765371405\\\\\\\\\\\\\\\" (2025-12-10 12:56:44 +0000 UTC to 2026-01-09 12:56:45 +0000 UTC (now=2025-12-10 12:57:01.30169166 +0000 UTC))\\\\\\\"\\\\nI1210 12:57:01.301889 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1765371416\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1765371416\\\\\\\\\\\\\\\" (2025-12-10 11:56:55 +0000 UTC to 2026-12-10 11:56:55 +0000 UTC (now=2025-12-10 12:57:01.301865574 +0000 UTC))\\\\\\\"\\\\nI1210 12:57:01.301907 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1210 12:57:01.301934 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nF1210 12:57:01.302850 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e5afbcb1ea81c3f9ec4152ef614a3f07ba1ded75c774c467e968f9c3ee72e33\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bee74fc4c681cc10c5a460c807659272e393e19173109e82ef65371c5b363ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bee74fc4c681cc10c5a460c807659272e393e19173109e82ef65371c5b363ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:09Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:09 crc kubenswrapper[4921]: I1210 12:57:09.865682 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:09 crc kubenswrapper[4921]: I1210 12:57:09.865720 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:09 crc kubenswrapper[4921]: I1210 12:57:09.865745 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:09 crc kubenswrapper[4921]: I1210 12:57:09.865759 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:09 crc kubenswrapper[4921]: I1210 12:57:09.865768 4921 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:09Z","lastTransitionTime":"2025-12-10T12:57:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:09 crc kubenswrapper[4921]: I1210 12:57:09.867211 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:09Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:09 crc kubenswrapper[4921]: I1210 12:57:09.889024 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:09Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:09 crc kubenswrapper[4921]: I1210 12:57:09.907426 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"354355f7-6630-49a8-bdc5-5e875feecb7f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22c45fd7d4d0bb91e995e76a0d813660f9b488a4765e3a21eab2485e1ff03ff3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dbm9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27975eaa70887a1e6ec3bc21ce170bbe5dfe5a05172264be8c8bd343aea02998\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dbm9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-vn2n6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:09Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:09 crc kubenswrapper[4921]: I1210 12:57:09.908518 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 12:57:09 crc kubenswrapper[4921]: I1210 12:57:09.908668 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 12:57:09 crc kubenswrapper[4921]: I1210 12:57:09.908721 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 12:57:09 crc kubenswrapper[4921]: E1210 12:57:09.908806 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 12:57:17.908784582 +0000 UTC m=+35.125006506 (durationBeforeRetry 8s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:57:09 crc kubenswrapper[4921]: E1210 12:57:09.908865 4921 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 10 12:57:09 crc kubenswrapper[4921]: E1210 12:57:09.908890 4921 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 10 12:57:09 crc kubenswrapper[4921]: E1210 12:57:09.908962 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-10 12:57:17.908941126 +0000 UTC m=+35.125163050 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 10 12:57:09 crc kubenswrapper[4921]: E1210 12:57:09.908985 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-10 12:57:17.908977047 +0000 UTC m=+35.125198971 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 10 12:57:09 crc kubenswrapper[4921]: I1210 12:57:09.926430 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bd245e67c99943297f64701eba8772143dc206caf67849eaf2f9a8e82dab0d26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:09Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:09 crc kubenswrapper[4921]: I1210 12:57:09.941291 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2789e9cd1bca4abecf0939aad4a5f63bdc250a525ad3664bc2440e8b0b7a834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:09Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:09 crc kubenswrapper[4921]: I1210 12:57:09.951779 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zmks6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f2626c5-78df-45d2-8970-c4f99790a0fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ft9kj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zmks6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:09Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:09 crc kubenswrapper[4921]: I1210 12:57:09.966634 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pqlx4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://480da3b2621712c4562f9423dc98fdbf17a9dc45365f129777611bc7e934c709\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\
":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lhs2m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pqlx4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:09Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:09 crc kubenswrapper[4921]: I1210 12:57:09.968265 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:09 crc kubenswrapper[4921]: I1210 12:57:09.968314 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:09 crc kubenswrapper[4921]: I1210 12:57:09.968329 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:09 crc kubenswrapper[4921]: I1210 12:57:09.968349 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:09 crc kubenswrapper[4921]: I1210 12:57:09.968364 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:09Z","lastTransitionTime":"2025-12-10T12:57:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:09 crc kubenswrapper[4921]: I1210 12:57:09.987041 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50684108-04fc-405c-82be-d21d16cd650b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34932b230bb26e6c4b1bdf433827ce608df8658f6fb76140a4
f0ac680dc1d43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://34932b230bb26e6c4b1bdf433827ce608df8658f6fb76140a4f0ac680dc1d43f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-m7n89\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:09Z is after 2025-08-24T17:21:41Z"
Dec 10 12:57:10 crc kubenswrapper[4921]: I1210 12:57:10.009771 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 10 12:57:10 crc kubenswrapper[4921]: I1210 12:57:10.009829 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 10 12:57:10 crc kubenswrapper[4921]: E1210 12:57:10.009975 4921 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Dec 10 12:57:10 crc kubenswrapper[4921]: E1210 12:57:10.009992 4921 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Dec 10 12:57:10 crc kubenswrapper[4921]: E1210 12:57:10.010004 4921 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 10 12:57:10 crc kubenswrapper[4921]: E1210 12:57:10.010028 4921 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Dec 10 12:57:10 crc kubenswrapper[4921]: E1210 12:57:10.010072 4921 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Dec 10 12:57:10 crc kubenswrapper[4921]: E1210 12:57:10.010086 4921 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 10 12:57:10 crc kubenswrapper[4921]: E1210 12:57:10.010103 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-10 12:57:18.010089 +0000 UTC m=+35.226310924 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 10 12:57:10 crc kubenswrapper[4921]: E1210 12:57:10.010151 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-10 12:57:18.010129301 +0000 UTC m=+35.226351225 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 10 12:57:10 crc kubenswrapper[4921]: I1210 12:57:10.072193 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 12:57:10 crc kubenswrapper[4921]: I1210 12:57:10.072241 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 12:57:10 crc kubenswrapper[4921]: I1210 12:57:10.072253 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 12:57:10 crc kubenswrapper[4921]: I1210 12:57:10.072270 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 12:57:10 crc kubenswrapper[4921]: I1210 12:57:10.072282 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:10Z","lastTransitionTime":"2025-12-10T12:57:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 12:57:10 crc kubenswrapper[4921]: I1210 12:57:10.175713 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 12:57:10 crc kubenswrapper[4921]: I1210 12:57:10.175758 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 12:57:10 crc kubenswrapper[4921]: I1210 12:57:10.175769 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 12:57:10 crc kubenswrapper[4921]: I1210 12:57:10.175790 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 12:57:10 crc kubenswrapper[4921]: I1210 12:57:10.175806 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:10Z","lastTransitionTime":"2025-12-10T12:57:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 12:57:10 crc kubenswrapper[4921]: I1210 12:57:10.192055 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 10 12:57:10 crc kubenswrapper[4921]: I1210 12:57:10.192128 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 10 12:57:10 crc kubenswrapper[4921]: I1210 12:57:10.192055 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 10 12:57:10 crc kubenswrapper[4921]: E1210 12:57:10.192199 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 10 12:57:10 crc kubenswrapper[4921]: E1210 12:57:10.192283 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 10 12:57:10 crc kubenswrapper[4921]: E1210 12:57:10.192433 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 10 12:57:10 crc kubenswrapper[4921]: I1210 12:57:10.279430 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 12:57:10 crc kubenswrapper[4921]: I1210 12:57:10.279474 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 12:57:10 crc kubenswrapper[4921]: I1210 12:57:10.279489 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 12:57:10 crc kubenswrapper[4921]: I1210 12:57:10.279506 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 12:57:10 crc kubenswrapper[4921]: I1210 12:57:10.279519 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:10Z","lastTransitionTime":"2025-12-10T12:57:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 12:57:10 crc kubenswrapper[4921]: I1210 12:57:10.384082 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 12:57:10 crc kubenswrapper[4921]: I1210 12:57:10.384119 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 12:57:10 crc kubenswrapper[4921]: I1210 12:57:10.384128 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 12:57:10 crc kubenswrapper[4921]: I1210 12:57:10.384144 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 12:57:10 crc kubenswrapper[4921]: I1210 12:57:10.384155 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:10Z","lastTransitionTime":"2025-12-10T12:57:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 12:57:10 crc kubenswrapper[4921]: I1210 12:57:10.397986 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" event={"ID":"50684108-04fc-405c-82be-d21d16cd650b","Type":"ContainerStarted","Data":"f8f888f214898cb28563da7a77267781622df1f2231c27d1fbdee617ada1ec2e"}
Dec 10 12:57:10 crc kubenswrapper[4921]: I1210 12:57:10.398059 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" event={"ID":"50684108-04fc-405c-82be-d21d16cd650b","Type":"ContainerStarted","Data":"1a88b1b9101bc4ab339d394df337e4e11ec8af98b44b621bcb84eed1a0fba3d7"}
Dec 10 12:57:10 crc kubenswrapper[4921]: I1210 12:57:10.398073 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" event={"ID":"50684108-04fc-405c-82be-d21d16cd650b","Type":"ContainerStarted","Data":"8fd269a96475df9dccf2f7bd0ffae831f397f49232f5c22df67903b9b8b8161e"}
Dec 10 12:57:10 crc kubenswrapper[4921]: I1210 12:57:10.398086 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" event={"ID":"50684108-04fc-405c-82be-d21d16cd650b","Type":"ContainerStarted","Data":"27956424405bdf6223a96b8fd91b5152276a1501c3de2e07dfafc8b3329a6063"}
Dec 10 12:57:10 crc kubenswrapper[4921]: I1210 12:57:10.398101 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" event={"ID":"50684108-04fc-405c-82be-d21d16cd650b","Type":"ContainerStarted","Data":"59a0f3962237d723e5aa9044de1ddce3673ae1fb4c9e5e0478cd41daa661f6d7"}
Dec 10 12:57:10 crc kubenswrapper[4921]: I1210 12:57:10.399672 4921 generic.go:334] "Generic (PLEG): container finished" podID="709b4982-f2e6-4692-ab1a-c1d5b7d507ad" containerID="34d9e720fab0818e4cdf1e2a4da042a5648c7c396fedf17b395ad07ececd5c9c" exitCode=0
Dec 10 12:57:10 crc kubenswrapper[4921]: I1210 12:57:10.399772 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-86bpd" event={"ID":"709b4982-f2e6-4692-ab1a-c1d5b7d507ad","Type":"ContainerDied","Data":"34d9e720fab0818e4cdf1e2a4da042a5648c7c396fedf17b395ad07ececd5c9c"}
Dec 10 12:57:10 crc kubenswrapper[4921]: I1210 12:57:10.402802 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-zmks6" event={"ID":"1f2626c5-78df-45d2-8970-c4f99790a0fb","Type":"ContainerStarted","Data":"d43ebe41a779225842dfa1c4d3be01575113b67ada9be07f553df1514e9dcf85"}
Dec 10 12:57:10 crc kubenswrapper[4921]: I1210 12:57:10.402871 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-zmks6" event={"ID":"1f2626c5-78df-45d2-8970-c4f99790a0fb","Type":"ContainerStarted","Data":"22c98a271fece0992dc07e76f11536aa5c48e279cdeb438c9e6f0a60e88d16b0"}
Dec 10 12:57:10 crc kubenswrapper[4921]: I1210 12:57:10.430251 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"371fafdc-aa16-4608-aaa2-e419c4ddbc18\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b9a190a657ca03f3fb08626b7af512164ff131b1783b903a02005a111a7036c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57991b0cb6fd4b37082ff5d4eecc6227d77f241e9a983cd3e0eb9db5b485865f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c24d974446ee70bf587bf3969542cda98f062a9cc78b6af73005d9b8d0a6ee02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5a3f231014293fc0412e577cf9840f62f8db86
9ea4f0f8bef1bfc5112b38cf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://17a6158acd097054719316d2ad29dc036546d3951bb1e8dd010618f9155270a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://534968b5f5d9e7b3063c91a3e0b68ba04d83e2cb65ab688b23d284adc6852155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://534968b5f5d9e7b3063c91a3e0b68ba04d83e2cb65ab688b23d284adc6852155\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0acb3ca5fa3945c89412f466b00193354c94ce56dbba608c104d3baf555a2c3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0acb3ca5fa3945c89412f466b00193354c94ce56dbba608c104d3baf555a2c3a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b7d1b714acf0f278cc0310204225d417266a241f1ea827dc625f7b89a7d0ebac\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b7d1b714acf0f278cc0310204225d417266a241f1ea827dc625f7b89a7d0ebac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:10Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:10 crc kubenswrapper[4921]: I1210 12:57:10.451039 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02726135-3050-46a1-a3ab-b2ce46cdb75d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12292f0529bcf32fb33e5accfbd0dfd7d53e377a9ee2046d4ca6efc78fe1c31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a4716beddbcd24e8418830aa5494cffffc21272e45e30bd15cfe58bfc07c543\\\",\\\"image\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f66fe2144cde40619405c04d7d83cbcc2e78503401df428502abad1682d4cb7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4244835c7f038a7c1bf4820de49854350a23fac13c5a252a1553f6508594f10e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:10Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:10 crc kubenswrapper[4921]: I1210 12:57:10.468303 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:10Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:10 crc kubenswrapper[4921]: I1210 12:57:10.487705 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://307b845aae3352df08e2f9fd394f4110a37b2a21650593ebb584c5bf37d01397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3be8a498516e12174c8b5612669fd69deef610c01ed9884a5228cd436bbae3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:10Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:10 crc kubenswrapper[4921]: I1210 12:57:10.487753 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:10 crc kubenswrapper[4921]: I1210 12:57:10.487791 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:10 crc kubenswrapper[4921]: I1210 12:57:10.487801 4921 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 10 12:57:10 crc kubenswrapper[4921]: I1210 12:57:10.487822 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:10 crc kubenswrapper[4921]: I1210 12:57:10.487843 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:10Z","lastTransitionTime":"2025-12-10T12:57:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:10 crc kubenswrapper[4921]: I1210 12:57:10.506245 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-86bpd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"709b4982-f2e6-4692-ab1a-c1d5b7d507ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34d9e720fab0818e4cdf1e2a4da042a5648c7c396fedf17b395ad07ececd5c9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://34d9e720fab0818e4cdf1e2a4da042a5648c7c396fedf17b395ad07ececd5c9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reaso
n\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-86bpd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:10Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:10 crc 
kubenswrapper[4921]: I1210 12:57:10.524100 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:10Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:10 crc kubenswrapper[4921]: I1210 12:57:10.539460 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:10Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:10 crc kubenswrapper[4921]: I1210 12:57:10.553150 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"354355f7-6630-49a8-bdc5-5e875feecb7f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22c45fd7d4d0bb91e995e76a0d813660f9b488a4765e3a21eab2485e1ff03ff3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dbm9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27975eaa70887a1e6ec3bc21ce170bbe5dfe5a05172264be8c8bd343aea02998\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dbm9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-vn2n6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:10Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:10 crc kubenswrapper[4921]: I1210 12:57:10.575341 4921 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f57208b0-80bc-4c1b-bbab-9d2f858972f6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0534394a39803e8a7555e29d0770b5ac7f9197a5f0e03bec4c5460d77fffdd14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6eaca0cb438e61f0856ed7dc64256ccd02aee8dac014d1f5e9cd8aa180c736fb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://692a4c4828dc74b1bfb948f58fab96ee6674030cb9009c72f30f9eae482eb682\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f534d6390920d177e185001b28f7ece42d82a0da922b4aaf174c271dbe975c50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b39874b20cdccc7903753342421a1f7e13b7e99a2cb699a7c0e44226aebd4f4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"message\\\":\\\"et denominator for mutating requests\\\\\\\" limit=200\\\\nI1210 12:57:01.294872 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1210 12:57:01.294893 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1210 12:57:01.294918 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 12:57:01.294926 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 12:57:01.294932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1210 12:57:01.294934 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1210 12:57:01.294938 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1210 12:57:01.294941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1210 12:57:01.301734 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2446357718/tls.crt::/tmp/serving-cert-2446357718/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1765371405\\\\\\\\\\\\\\\" (2025-12-10 12:56:44 +0000 UTC to 2026-01-09 12:56:45 +0000 UTC (now=2025-12-10 12:57:01.30169166 +0000 UTC))\\\\\\\"\\\\nI1210 12:57:01.301889 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1765371416\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1765371416\\\\\\\\\\\\\\\" (2025-12-10 11:56:55 +0000 UTC to 2026-12-10 11:56:55 +0000 UTC (now=2025-12-10 12:57:01.301865574 +0000 UTC))\\\\\\\"\\\\nI1210 12:57:01.301907 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1210 12:57:01.301934 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nF1210 12:57:01.302850 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e5afbcb1ea81c3f9ec4152ef614a3f07ba1ded75c774c467e968f9c3ee72e33\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bee74fc4c681cc10c5a460c807659272e393e19173109e82ef65371c5b363ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bee74fc4c681cc10c5a460c807659272e393e19173109e82ef65371c5b363ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:10Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:10 crc kubenswrapper[4921]: I1210 12:57:10.591207 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:10 crc kubenswrapper[4921]: I1210 12:57:10.591768 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:10 crc kubenswrapper[4921]: I1210 12:57:10.591782 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:10 crc kubenswrapper[4921]: I1210 12:57:10.591800 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:10 crc kubenswrapper[4921]: I1210 12:57:10.591812 4921 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:10Z","lastTransitionTime":"2025-12-10T12:57:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:10 crc kubenswrapper[4921]: I1210 12:57:10.593082 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bd245e67c99943297f64701eba8772143dc206caf67849eaf2f9a8e82dab0d26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:10Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:10 crc kubenswrapper[4921]: I1210 12:57:10.611033 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2789e9cd1bca4abecf0939aad4a5f63bdc250a525ad3664bc2440e8b0b7a834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:10Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:10 crc kubenswrapper[4921]: I1210 12:57:10.624916 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zmks6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f2626c5-78df-45d2-8970-c4f99790a0fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ft9kj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zmks6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:10Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:10 crc kubenswrapper[4921]: I1210 12:57:10.638416 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pqlx4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://480da3b2621712c4562f9423dc98fdbf17a9dc45365f129777611bc7e934c709\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\
":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lhs2m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pqlx4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:10Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:10 crc kubenswrapper[4921]: I1210 12:57:10.659290 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50684108-04fc-405c-82be-d21d16cd650b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34932b230bb26e6c4b1bdf433827ce608df8658f6fb76140a4f0ac680dc1d43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://34932b230bb26e6c4b1bdf433827ce608df8658f6fb76140a4f0ac680dc1d43f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-m7n89\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:10Z 
is after 2025-08-24T17:21:41Z" Dec 10 12:57:10 crc kubenswrapper[4921]: I1210 12:57:10.672935 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bd245e67c99943297f64701eba8772143dc206caf67849eaf2f9a8e82dab0d26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:10Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:10 crc kubenswrapper[4921]: I1210 12:57:10.687447 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2789e9cd1bca4abecf0939aad4a5f63bdc250a525ad3664bc2440e8b0b7a834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:10Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:10 crc kubenswrapper[4921]: I1210 12:57:10.694426 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:10 crc kubenswrapper[4921]: I1210 12:57:10.694488 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:10 crc kubenswrapper[4921]: I1210 12:57:10.694502 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:10 crc kubenswrapper[4921]: I1210 12:57:10.694563 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:10 crc kubenswrapper[4921]: I1210 12:57:10.694581 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:10Z","lastTransitionTime":"2025-12-10T12:57:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:10 crc kubenswrapper[4921]: I1210 12:57:10.702658 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zmks6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f2626c5-78df-45d2-8970-c4f99790a0fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d43ebe41a779225842dfa1c4d3be01575113b67ada9be07f553df1514e9dcf85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ft9kj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zmks6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:10Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:10 crc kubenswrapper[4921]: I1210 12:57:10.718418 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pqlx4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://480da3b2621712c4562f9423dc98fdbf17a9dc45365f129777611bc7e934c709\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lhs2m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pqlx4\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:10Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:10 crc kubenswrapper[4921]: I1210 12:57:10.736724 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50684108-04fc-405c-82be-d21d16cd650b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release
-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\"
,\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\
\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34932b230bb26e6c4b1bdf433827ce608df8658f6fb76140a4f0ac680dc1d43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://34932b230bb26e6c4b1bdf433827ce608df8658f6fb76140a4f0ac680dc1d43f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-m7n89\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:10Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:10 crc kubenswrapper[4921]: I1210 12:57:10.754603 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"371fafdc-aa16-4608-aaa2-e419c4ddbc18\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b9a190a657ca03f3fb08626b7af512164ff131b1783b903a02005a111a7036c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57991b0cb6fd4b37082ff5d4eecc6227d77f241e9a983cd3e0eb9db5b485865f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c24d974446ee70bf587bf3969542cda98f062a9cc78b6af73005d9b8d0a6ee02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5a3f231014293fc0412e577cf9840f62f8db86
9ea4f0f8bef1bfc5112b38cf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://17a6158acd097054719316d2ad29dc036546d3951bb1e8dd010618f9155270a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://534968b5f5d9e7b3063c91a3e0b68ba04d83e2cb65ab688b23d284adc6852155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://534968b5f5d9e7b3063c91a3e0b68ba04d83e2cb65ab688b23d284adc6852155\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0acb3ca5fa3945c89412f466b00193354c94ce56dbba608c104d3baf555a2c3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0acb3ca5fa3945c89412f466b00193354c94ce56dbba608c104d3baf555a2c3a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b7d1b714acf0f278cc0310204225d417266a241f1ea827dc625f7b89a7d0ebac\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b7d1b714acf0f278cc0310204225d417266a241f1ea827dc625f7b89a7d0ebac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:10Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:10 crc kubenswrapper[4921]: I1210 12:57:10.769229 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02726135-3050-46a1-a3ab-b2ce46cdb75d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12292f0529bcf32fb33e5accfbd0dfd7d53e377a9ee2046d4ca6efc78fe1c31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a4716beddbcd24e8418830aa5494cffffc21272e45e30bd15cfe58bfc07c543\\\",\\\"image\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f66fe2144cde40619405c04d7d83cbcc2e78503401df428502abad1682d4cb7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4244835c7f038a7c1bf4820de49854350a23fac13c5a252a1553f6508594f10e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:10Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:10 crc kubenswrapper[4921]: I1210 12:57:10.786305 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:10Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:10 crc kubenswrapper[4921]: I1210 12:57:10.797519 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:10 crc kubenswrapper[4921]: I1210 12:57:10.797568 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:10 crc kubenswrapper[4921]: I1210 12:57:10.797583 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:10 crc kubenswrapper[4921]: I1210 12:57:10.797606 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:10 crc kubenswrapper[4921]: I1210 12:57:10.797621 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:10Z","lastTransitionTime":"2025-12-10T12:57:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:10 crc kubenswrapper[4921]: I1210 12:57:10.805528 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://307b845aae3352df08e2f9fd394f4110a37b2a21650593ebb584c5bf37d01397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3be8a498516e12174c8b5612669fd69deef610c01ed9884a5228cd436bbae3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:10Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:10 crc kubenswrapper[4921]: I1210 12:57:10.822311 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-86bpd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"709b4982-f2e6-4692-ab1a-c1d5b7d507ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34d9e720fab0818e4cdf1e2a4da042a5648c7c396fedf17b395ad07ececd5c9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://34d9e720fab0818e4cdf1e2a4da042a5648c7c396fedf17b395ad07ececd5c9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c85
7df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay
.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-86bpd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:10Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:10 crc kubenswrapper[4921]: I1210 12:57:10.835800 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f57208b0-80bc-4c1b-bbab-9d2f858972f6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0534394a39803e8a7555e29d0770b5ac7f9197a5f0e03bec4c5460d77fffdd14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6eaca0cb438e61f0856ed7dc64256ccd02aee8dac014d1f5e9cd8aa180c736fb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://692a4c4828dc74b1bfb948f58fab96ee6674030cb9009c72f30f9eae482eb682\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f534d6390920d177e185001b28f7ece42d82a0da922b4aaf174c271dbe975c50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b39874b20cdccc7903753342421a1f7e13b7e99a2cb699a7c0e44226aebd4f4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"message\\\":\\\"et denominator for mutating requests\\\\\\\" limit=200\\\\nI1210 12:57:01.294872 1 
secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1210 12:57:01.294893 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1210 12:57:01.294918 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 12:57:01.294926 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 12:57:01.294932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1210 12:57:01.294934 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1210 12:57:01.294938 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1210 12:57:01.294941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1210 12:57:01.301734 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2446357718/tls.crt::/tmp/serving-cert-2446357718/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1765371405\\\\\\\\\\\\\\\" (2025-12-10 12:56:44 +0000 UTC to 2026-01-09 12:56:45 +0000 UTC (now=2025-12-10 12:57:01.30169166 +0000 UTC))\\\\\\\"\\\\nI1210 12:57:01.301889 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1765371416\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1765371416\\\\\\\\\\\\\\\" (2025-12-10 11:56:55 +0000 UTC to 2026-12-10 11:56:55 +0000 UTC (now=2025-12-10 12:57:01.301865574 +0000 UTC))\\\\\\\"\\\\nI1210 12:57:01.301907 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1210 12:57:01.301934 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nF1210 12:57:01.302850 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e5afbcb1ea81c3f9ec4152ef614a3f07ba1ded75c774c467e968f9c3ee72e33\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bee74fc4c681cc10c5a460c807659272e393e19173109e82ef65371c5b363ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bee74fc4c681cc10c5a460c807659272e393e19173109e82ef65371c5b363ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:10Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:10 crc kubenswrapper[4921]: I1210 12:57:10.853660 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:10Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:10 crc kubenswrapper[4921]: I1210 12:57:10.865366 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:10Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:10 crc kubenswrapper[4921]: I1210 12:57:10.877650 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"354355f7-6630-49a8-bdc5-5e875feecb7f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22c45fd7d4d0bb91e995e76a0d813660f9b488a4765e3a21eab2485e1ff03ff3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dbm9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27975eaa70887a1e6ec3bc21ce170bbe5dfe5a05172264be8c8bd343aea02998\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dbm9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-vn2n6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:10Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:10 crc kubenswrapper[4921]: I1210 12:57:10.900199 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:10 crc kubenswrapper[4921]: I1210 12:57:10.900252 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:10 crc kubenswrapper[4921]: I1210 12:57:10.900266 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:10 crc kubenswrapper[4921]: I1210 12:57:10.900288 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:10 crc kubenswrapper[4921]: I1210 12:57:10.900300 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:10Z","lastTransitionTime":"2025-12-10T12:57:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:11 crc kubenswrapper[4921]: I1210 12:57:11.003439 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:11 crc kubenswrapper[4921]: I1210 12:57:11.003480 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:11 crc kubenswrapper[4921]: I1210 12:57:11.003489 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:11 crc kubenswrapper[4921]: I1210 12:57:11.003505 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:11 crc kubenswrapper[4921]: I1210 12:57:11.003516 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:11Z","lastTransitionTime":"2025-12-10T12:57:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:11 crc kubenswrapper[4921]: I1210 12:57:11.106725 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:11 crc kubenswrapper[4921]: I1210 12:57:11.106771 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:11 crc kubenswrapper[4921]: I1210 12:57:11.106783 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:11 crc kubenswrapper[4921]: I1210 12:57:11.106800 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:11 crc kubenswrapper[4921]: I1210 12:57:11.106810 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:11Z","lastTransitionTime":"2025-12-10T12:57:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:11 crc kubenswrapper[4921]: I1210 12:57:11.209635 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:11 crc kubenswrapper[4921]: I1210 12:57:11.209712 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:11 crc kubenswrapper[4921]: I1210 12:57:11.209738 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:11 crc kubenswrapper[4921]: I1210 12:57:11.209807 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:11 crc kubenswrapper[4921]: I1210 12:57:11.209834 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:11Z","lastTransitionTime":"2025-12-10T12:57:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:11 crc kubenswrapper[4921]: I1210 12:57:11.312584 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:11 crc kubenswrapper[4921]: I1210 12:57:11.312682 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:11 crc kubenswrapper[4921]: I1210 12:57:11.312704 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:11 crc kubenswrapper[4921]: I1210 12:57:11.313128 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:11 crc kubenswrapper[4921]: I1210 12:57:11.313349 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:11Z","lastTransitionTime":"2025-12-10T12:57:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:11 crc kubenswrapper[4921]: I1210 12:57:11.411137 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" event={"ID":"50684108-04fc-405c-82be-d21d16cd650b","Type":"ContainerStarted","Data":"933c0c81aa0aa2d676a6e404f883a7c81240ef7b07a2e794878c85994d0eb88f"} Dec 10 12:57:11 crc kubenswrapper[4921]: I1210 12:57:11.413919 4921 generic.go:334] "Generic (PLEG): container finished" podID="709b4982-f2e6-4692-ab1a-c1d5b7d507ad" containerID="04723fc3840c9d632dae527a5afa04fc7eea858426056da3dfe8e72186198ab1" exitCode=0 Dec 10 12:57:11 crc kubenswrapper[4921]: I1210 12:57:11.413977 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-86bpd" event={"ID":"709b4982-f2e6-4692-ab1a-c1d5b7d507ad","Type":"ContainerDied","Data":"04723fc3840c9d632dae527a5afa04fc7eea858426056da3dfe8e72186198ab1"} Dec 10 12:57:11 crc kubenswrapper[4921]: I1210 12:57:11.415270 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:11 crc kubenswrapper[4921]: I1210 12:57:11.415303 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:11 crc kubenswrapper[4921]: I1210 12:57:11.415314 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:11 crc kubenswrapper[4921]: I1210 12:57:11.415332 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:11 crc kubenswrapper[4921]: I1210 12:57:11.415349 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:11Z","lastTransitionTime":"2025-12-10T12:57:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:11 crc kubenswrapper[4921]: I1210 12:57:11.434785 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-jskgz"] Dec 10 12:57:11 crc kubenswrapper[4921]: I1210 12:57:11.435207 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-jskgz" Dec 10 12:57:11 crc kubenswrapper[4921]: I1210 12:57:11.440948 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Dec 10 12:57:11 crc kubenswrapper[4921]: I1210 12:57:11.441037 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Dec 10 12:57:11 crc kubenswrapper[4921]: I1210 12:57:11.441042 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Dec 10 12:57:11 crc kubenswrapper[4921]: I1210 12:57:11.440984 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Dec 10 12:57:11 crc kubenswrapper[4921]: I1210 12:57:11.443739 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bd245e67c99943297f64701eba8772143dc206caf67849eaf2f9a8e82dab0d26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:11Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:11 crc kubenswrapper[4921]: I1210 12:57:11.462772 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zmks6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f2626c5-78df-45d2-8970-c4f99790a0fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d43ebe41a779225842dfa1c4d3be01575113b67ada9be07f553df1514e9dcf85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ft9kj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zmks6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:11Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:11 crc kubenswrapper[4921]: I1210 12:57:11.477891 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pqlx4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://480da3b2621712c4562f9423dc98fdbf17a9dc45365f129777611bc7e934c709\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lhs2m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pqlx4\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:11Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:11 crc kubenswrapper[4921]: I1210 12:57:11.500876 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50684108-04fc-405c-82be-d21d16cd650b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release
-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\"
,\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\
\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34932b230bb26e6c4b1bdf433827ce608df8658f6fb76140a4f0ac680dc1d43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://34932b230bb26e6c4b1bdf433827ce608df8658f6fb76140a4f0ac680dc1d43f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-m7n89\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:11Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:11 crc kubenswrapper[4921]: I1210 12:57:11.523661 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:11 crc kubenswrapper[4921]: I1210 12:57:11.523722 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:11 crc kubenswrapper[4921]: I1210 12:57:11.523739 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:11 crc kubenswrapper[4921]: I1210 12:57:11.523765 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:11 crc kubenswrapper[4921]: I1210 12:57:11.523784 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:11Z","lastTransitionTime":"2025-12-10T12:57:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:11 crc kubenswrapper[4921]: I1210 12:57:11.524335 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/5ad003cc-9fcc-4fc6-86b9-247b30013c0a-serviceca\") pod \"node-ca-jskgz\" (UID: \"5ad003cc-9fcc-4fc6-86b9-247b30013c0a\") " pod="openshift-image-registry/node-ca-jskgz" Dec 10 12:57:11 crc kubenswrapper[4921]: I1210 12:57:11.524479 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/5ad003cc-9fcc-4fc6-86b9-247b30013c0a-host\") pod \"node-ca-jskgz\" (UID: \"5ad003cc-9fcc-4fc6-86b9-247b30013c0a\") " pod="openshift-image-registry/node-ca-jskgz" Dec 10 12:57:11 crc kubenswrapper[4921]: I1210 12:57:11.524930 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m875h\" (UniqueName: \"kubernetes.io/projected/5ad003cc-9fcc-4fc6-86b9-247b30013c0a-kube-api-access-m875h\") pod \"node-ca-jskgz\" (UID: \"5ad003cc-9fcc-4fc6-86b9-247b30013c0a\") " pod="openshift-image-registry/node-ca-jskgz" Dec 10 12:57:11 crc kubenswrapper[4921]: I1210 12:57:11.530072 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2789e9cd1bca4abecf0939aad4a5f63bdc250a525ad3664bc2440e8b0b7a834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:11Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:11 crc 
kubenswrapper[4921]: I1210 12:57:11.598858 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02726135-3050-46a1-a3ab-b2ce46cdb75d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12292f0529bcf32fb33e5accfbd0dfd7d53e377a9ee2046d4ca6efc78fe1c31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a4716beddbcd24e8418830aa5494cffffc21272e45e30bd15cfe58bfc07c543\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f66fe2144cde40619405c04d7d83cbcc2e78503401df428502abad1682d4cb7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\
":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4244835c7f038a7c1bf4820de49854350a23fac13c5a252a1553f6508594f10e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:11Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:11 crc kubenswrapper[4921]: I1210 12:57:11.620417 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:11Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:11 crc kubenswrapper[4921]: I1210 12:57:11.626350 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/5ad003cc-9fcc-4fc6-86b9-247b30013c0a-host\") pod \"node-ca-jskgz\" (UID: \"5ad003cc-9fcc-4fc6-86b9-247b30013c0a\") " pod="openshift-image-registry/node-ca-jskgz" Dec 10 12:57:11 crc kubenswrapper[4921]: I1210 12:57:11.626518 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m875h\" (UniqueName: \"kubernetes.io/projected/5ad003cc-9fcc-4fc6-86b9-247b30013c0a-kube-api-access-m875h\") pod \"node-ca-jskgz\" (UID: \"5ad003cc-9fcc-4fc6-86b9-247b30013c0a\") " pod="openshift-image-registry/node-ca-jskgz" Dec 10 12:57:11 crc kubenswrapper[4921]: I1210 12:57:11.626461 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/5ad003cc-9fcc-4fc6-86b9-247b30013c0a-host\") pod \"node-ca-jskgz\" (UID: \"5ad003cc-9fcc-4fc6-86b9-247b30013c0a\") " pod="openshift-image-registry/node-ca-jskgz" Dec 10 12:57:11 crc kubenswrapper[4921]: I1210 12:57:11.626676 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/5ad003cc-9fcc-4fc6-86b9-247b30013c0a-serviceca\") pod \"node-ca-jskgz\" (UID: \"5ad003cc-9fcc-4fc6-86b9-247b30013c0a\") " pod="openshift-image-registry/node-ca-jskgz" Dec 10 12:57:11 crc kubenswrapper[4921]: I1210 12:57:11.628476 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/5ad003cc-9fcc-4fc6-86b9-247b30013c0a-serviceca\") pod \"node-ca-jskgz\" (UID: \"5ad003cc-9fcc-4fc6-86b9-247b30013c0a\") " pod="openshift-image-registry/node-ca-jskgz" Dec 10 12:57:11 crc kubenswrapper[4921]: I1210 12:57:11.628910 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:11 crc kubenswrapper[4921]: I1210 12:57:11.628931 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:11 crc kubenswrapper[4921]: I1210 12:57:11.628940 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:11 crc kubenswrapper[4921]: I1210 12:57:11.628958 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:11 crc kubenswrapper[4921]: I1210 12:57:11.628970 4921 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:11Z","lastTransitionTime":"2025-12-10T12:57:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:11 crc kubenswrapper[4921]: I1210 12:57:11.639545 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://307b845aae3352df08e2f9fd394f4110a37b2a21650593ebb584c5bf37d01397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3be8a498516e12174c8b5612669fd69deef610c01ed9884a5228cd436bbae3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:11Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:11 crc kubenswrapper[4921]: I1210 12:57:11.651612 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m875h\" (UniqueName: \"kubernetes.io/projected/5ad003cc-9fcc-4fc6-86b9-247b30013c0a-kube-api-access-m875h\") pod \"node-ca-jskgz\" (UID: \"5ad003cc-9fcc-4fc6-86b9-247b30013c0a\") " pod="openshift-image-registry/node-ca-jskgz" Dec 10 12:57:11 crc kubenswrapper[4921]: I1210 12:57:11.662195 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-86bpd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"709b4982-f2e6-4692-ab1a-c1d5b7d507ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34d9e720fab0818e4cdf1e2a4da042a5648c7c396fedf17b395ad07ececd5c9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://34d9e720fab0818e4cdf1e2a4da042a5648c7c396fedf17b395ad07ececd5c9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04723fc3840c9d632dae527a5afa04fc7eea858426056da3dfe8e72186198ab1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://04723fc3840c9d632dae527a5afa04fc7eea858426056da3dfe8e72186198ab1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-
10T12:57:08Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-86bpd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:11Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:11 crc kubenswrapper[4921]: I1210 12:57:11.684039 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"371fafdc-aa16-4608-aaa2-e419c4ddbc18\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b9a190a657ca03f3fb08626b7af512164ff131b1783b903a02005a111a7036c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57991b0cb6fd4b37082ff5d4eecc6227d77f241e9a983cd3e0eb9db5b485865f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c24d974446ee70bf587bf3969542cda98f062a9cc78b6af73005d9b8d0a6ee02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageI
D\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5a3f231014293fc0412e577cf9840f62f8db869ea4f0f8bef1bfc5112b38cf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://17a6158acd097054719316d2ad29dc036546d3951bb1e8dd010618f9155270a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://534968b5f5d9e7b3063c91a3e0b68ba04d83e2cb65ab688b23d284adc6852155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://534968b5f5d9e7b3063c91a3e0b68ba04d83e2cb65ab688b23d284adc6852155\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0acb3ca5fa3945c89412f466b00193354c94ce56dbba608c104d3baf555a2c3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@
sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0acb3ca5fa3945c89412f466b00193354c94ce56dbba608c104d3baf555a2c3a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b7d1b714acf0f278cc0310204225d417266a241f1ea827dc625f7b89a7d0ebac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b7d1b714acf0f278cc0310204225d417266a241f1ea827dc625f7b89a7d0ebac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:11Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:11 crc kubenswrapper[4921]: I1210 12:57:11.698917 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when 
the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:11Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:11 crc kubenswrapper[4921]: I1210 12:57:11.710872 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"354355f7-6630-49a8-bdc5-5e875feecb7f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22c45fd7d4d0bb91e995e76a0d813660f9b488a4765e3a21eab2485e1ff03ff3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dbm9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27975eaa70887a1e6ec3bc21ce170bbe5dfe5a05172264be8c8bd343aea02998\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c9
15fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dbm9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-vn2n6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:11Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:11 crc kubenswrapper[4921]: I1210 12:57:11.726581 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f57208b0-80bc-4c1b-bbab-9d2f858972f6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0534394a39803e8a7555e29d0770b5ac7f9197a5f0e03bec4c5460d77fffdd14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6eaca0cb438e61f0856ed7dc64256ccd02aee8dac014d1f5e9cd8aa180c736fb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://692a4c4828dc74b1bfb948f58fab96ee6674030cb9009c72f30f9eae482eb682\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f534d6390920d177e185001b28f7ece42d82a0da922b4aaf174c271dbe975c50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b39874b20cdccc7903753342421a1f7e13b7e99a2cb699a7c0e44226aebd4f4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"message\\\":\\\"et denominator for mutating requests\\\\\\\" limit=200\\\\nI1210 12:57:01.294872 1 
secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1210 12:57:01.294893 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1210 12:57:01.294918 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 12:57:01.294926 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 12:57:01.294932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1210 12:57:01.294934 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1210 12:57:01.294938 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1210 12:57:01.294941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1210 12:57:01.301734 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2446357718/tls.crt::/tmp/serving-cert-2446357718/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1765371405\\\\\\\\\\\\\\\" (2025-12-10 12:56:44 +0000 UTC to 2026-01-09 12:56:45 +0000 UTC (now=2025-12-10 12:57:01.30169166 +0000 UTC))\\\\\\\"\\\\nI1210 12:57:01.301889 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1765371416\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1765371416\\\\\\\\\\\\\\\" (2025-12-10 11:56:55 +0000 UTC to 2026-12-10 11:56:55 +0000 UTC (now=2025-12-10 12:57:01.301865574 +0000 UTC))\\\\\\\"\\\\nI1210 12:57:01.301907 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1210 12:57:01.301934 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nF1210 12:57:01.302850 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e5afbcb1ea81c3f9ec4152ef614a3f07ba1ded75c774c467e968f9c3ee72e33\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bee74fc4c681cc10c5a460c807659272e393e19173109e82ef65371c5b363ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bee74fc4c681cc10c5a460c807659272e393e19173109e82ef65371c5b363ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:11Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:11 crc kubenswrapper[4921]: I1210 12:57:11.731643 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:11 crc kubenswrapper[4921]: I1210 12:57:11.731688 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:11 crc kubenswrapper[4921]: I1210 12:57:11.731698 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:11 crc kubenswrapper[4921]: I1210 12:57:11.731720 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:11 crc kubenswrapper[4921]: I1210 12:57:11.731731 4921 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:11Z","lastTransitionTime":"2025-12-10T12:57:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:11 crc kubenswrapper[4921]: I1210 12:57:11.740818 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:11Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:11 crc kubenswrapper[4921]: I1210 12:57:11.755126 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bd245e67c99943297f64701eba8772143dc206caf67849eaf2f9a8e82dab0d26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:11Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:11 crc kubenswrapper[4921]: I1210 12:57:11.764841 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jskgz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ad003cc-9fcc-4fc6-86b9-247b30013c0a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:11Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:11Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m875h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:11Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jskgz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:11Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:11 crc kubenswrapper[4921]: I1210 12:57:11.764948 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-jskgz" Dec 10 12:57:11 crc kubenswrapper[4921]: I1210 12:57:11.780465 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2789e9cd1bca4abecf0939aad4a5f63bdc250a525ad3664bc2440e8b0b7a834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:11Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:11 crc kubenswrapper[4921]: I1210 12:57:11.793432 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zmks6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f2626c5-78df-45d2-8970-c4f99790a0fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d43ebe41a779225842dfa1c4d3be01575113b67ada9be07f553df1514e9dcf85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ft9kj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zmks6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:11Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:11 crc kubenswrapper[4921]: I1210 12:57:11.808987 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pqlx4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://480da3b2621712c4562f9423dc98fdbf17a9dc45365f129777611bc7e934c709\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lhs2m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pqlx4\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:11Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:11 crc kubenswrapper[4921]: I1210 12:57:11.831911 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50684108-04fc-405c-82be-d21d16cd650b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release
-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\"
,\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\
\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34932b230bb26e6c4b1bdf433827ce608df8658f6fb76140a4f0ac680dc1d43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://34932b230bb26e6c4b1bdf433827ce608df8658f6fb76140a4f0ac680dc1d43f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-m7n89\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:11Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:11 crc kubenswrapper[4921]: I1210 12:57:11.835687 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:11 crc kubenswrapper[4921]: I1210 12:57:11.835739 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:11 crc kubenswrapper[4921]: I1210 12:57:11.835750 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:11 crc kubenswrapper[4921]: I1210 12:57:11.835768 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:11 crc kubenswrapper[4921]: I1210 12:57:11.835780 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:11Z","lastTransitionTime":"2025-12-10T12:57:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:11 crc kubenswrapper[4921]: I1210 12:57:11.856037 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"371fafdc-aa16-4608-aaa2-e419c4ddbc18\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b9a190a657ca03f3fb08626b7af512164ff131b1783b903a02005a111a7036c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57991b0cb6fd4b37082ff5d4eecc6227d77f241e9a983cd3e0eb9db5b485865f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c24d974446ee70bf587bf3969542cda98f062a9cc78b6af73005d9b8d0a6ee02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5a3f231014293fc0412e577cf9840f62f8db869ea4f0f8bef1bfc5112b38cf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://17a6158acd097054719316d2ad29dc036546d3951bb1e8dd010618f9155270a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://534968b5f5d9e7b3063c91a3e0b68ba04d83e2cb65ab688b23d284adc6852155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://534968b5f5d9e7b3063c91a3e0b68ba04d83e2cb65ab688b23d284adc6852155\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0acb3ca5fa3945c89412f466b00193354c94ce56dbba608c104d3baf555a2c3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0acb3ca5fa3945c89412f466b00193354c94ce56dbba608c104d3baf555a2c3a\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b7d1b714acf0f278cc0310204225d417266a241f1ea827dc625f7b89a7d0ebac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b7d1b714acf0f278cc0310204225d417266a241f1ea827dc625f7b89a7d0ebac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:11Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:11 crc kubenswrapper[4921]: I1210 12:57:11.872267 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"02726135-3050-46a1-a3ab-b2ce46cdb75d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12292f0529bcf32fb33e5accfbd0dfd7d53e377a9ee2046d4ca6efc78fe1c31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a4716beddbcd24e8418830aa5494cffffc21272e45e30bd15cfe58bfc07c543\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f66fe2144cde40619405c04d7d83cbcc2e78503401df428502abad1682d4cb7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4244835c7f038a7c1bf4820de49854350a23fac13c5a252a1553f6508594f10e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:11Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:11 crc kubenswrapper[4921]: I1210 12:57:11.886782 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:11Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:11 crc kubenswrapper[4921]: I1210 12:57:11.900507 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://307b845aae3352df08e2f9fd394f4110a37b2a21650593ebb584c5bf37d01397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3be8a498516e12174c8b5612669fd69deef610c01ed9884a5228cd436bbae3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:11Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:11 crc kubenswrapper[4921]: I1210 12:57:11.924451 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-86bpd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"709b4982-f2e6-4692-ab1a-c1d5b7d507ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34d9e720fab0818e4cdf1e2a4da042a5648c7c396fedf17b395ad07ececd5c9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://34d9e720fab0818e4cdf1e2a4da042a5648c7c396fedf17b395ad07ececd5c9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04723fc3840c9d632dae527a5afa04fc7eea858426056da3dfe8e72186198ab1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://04723fc3840c9d632dae527a5afa04fc7eea858426056da3dfe8e72186198ab1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-
10T12:57:08Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-86bpd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:11Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:11 crc kubenswrapper[4921]: I1210 12:57:11.939089 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:11 crc kubenswrapper[4921]: I1210 12:57:11.939127 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:11 crc kubenswrapper[4921]: I1210 12:57:11.939137 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:11 crc kubenswrapper[4921]: I1210 12:57:11.939158 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:11 crc kubenswrapper[4921]: I1210 12:57:11.939167 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:11Z","lastTransitionTime":"2025-12-10T12:57:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:11 crc kubenswrapper[4921]: I1210 12:57:11.939734 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f57208b0-80bc-4c1b-bbab-9d2f858972f6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0534394a39803e8a7555e29d0770b5ac7f9197a5f0e03bec4c5460d77fffdd14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6eaca0cb438e61f0856ed7dc64256ccd02aee8dac014d1f5e9cd8aa180c736fb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://692a4c4828dc74b1bfb948f58fab96ee6674030cb9009c72f30f9eae482eb682\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f534d6390920d177e185001b28f7ece42d82a0da922b4aaf174c271dbe975c50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b39874b20cdccc7903753342421a1f7e13b7e99a2cb699a7c0e44226aebd4f4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"message\\\":\\\"et denominator for mutating requests\\\\\\\" limit=200\\\\nI1210 12:57:01.294872 1 
secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1210 12:57:01.294893 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1210 12:57:01.294918 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 12:57:01.294926 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 12:57:01.294932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1210 12:57:01.294934 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1210 12:57:01.294938 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1210 12:57:01.294941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1210 12:57:01.301734 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2446357718/tls.crt::/tmp/serving-cert-2446357718/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1765371405\\\\\\\\\\\\\\\" (2025-12-10 12:56:44 +0000 UTC to 2026-01-09 12:56:45 +0000 UTC (now=2025-12-10 12:57:01.30169166 +0000 UTC))\\\\\\\"\\\\nI1210 12:57:01.301889 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1765371416\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1765371416\\\\\\\\\\\\\\\" (2025-12-10 11:56:55 +0000 UTC to 2026-12-10 11:56:55 +0000 UTC (now=2025-12-10 12:57:01.301865574 +0000 UTC))\\\\\\\"\\\\nI1210 12:57:01.301907 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1210 12:57:01.301934 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nF1210 12:57:01.302850 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e5afbcb1ea81c3f9ec4152ef614a3f07ba1ded75c774c467e968f9c3ee72e33\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bee74fc4c681cc10c5a460c807659272e393e19173109e82ef65371c5b363ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bee74fc4c681cc10c5a460c807659272e393e19173109e82ef65371c5b363ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:11Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:11 crc kubenswrapper[4921]: I1210 12:57:11.955018 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:11Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:11 crc kubenswrapper[4921]: I1210 12:57:11.969530 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:11Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:11 crc kubenswrapper[4921]: I1210 12:57:11.984408 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"354355f7-6630-49a8-bdc5-5e875feecb7f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22c45fd7d4d0bb91e995e76a0d813660f9b488a4765e3a21eab2485e1ff03ff3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dbm9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27975eaa70887a1e6ec3bc21ce170bbe5dfe5a05172264be8c8bd343aea02998\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dbm9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-vn2n6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:11Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:12 crc kubenswrapper[4921]: I1210 12:57:12.042307 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:12 crc kubenswrapper[4921]: I1210 12:57:12.042377 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:12 crc kubenswrapper[4921]: I1210 12:57:12.042424 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:12 crc kubenswrapper[4921]: I1210 12:57:12.042453 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:12 crc kubenswrapper[4921]: I1210 12:57:12.042472 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:12Z","lastTransitionTime":"2025-12-10T12:57:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:12 crc kubenswrapper[4921]: I1210 12:57:12.146440 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:12 crc kubenswrapper[4921]: I1210 12:57:12.146818 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:12 crc kubenswrapper[4921]: I1210 12:57:12.146831 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:12 crc kubenswrapper[4921]: I1210 12:57:12.146852 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:12 crc kubenswrapper[4921]: I1210 12:57:12.146865 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:12Z","lastTransitionTime":"2025-12-10T12:57:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:12 crc kubenswrapper[4921]: I1210 12:57:12.191972 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 12:57:12 crc kubenswrapper[4921]: I1210 12:57:12.192029 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 12:57:12 crc kubenswrapper[4921]: I1210 12:57:12.191990 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 12:57:12 crc kubenswrapper[4921]: E1210 12:57:12.192241 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 12:57:12 crc kubenswrapper[4921]: E1210 12:57:12.192459 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 12:57:12 crc kubenswrapper[4921]: E1210 12:57:12.192552 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 12:57:12 crc kubenswrapper[4921]: I1210 12:57:12.250448 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:12 crc kubenswrapper[4921]: I1210 12:57:12.250522 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:12 crc kubenswrapper[4921]: I1210 12:57:12.250536 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:12 crc kubenswrapper[4921]: I1210 12:57:12.250561 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:12 crc kubenswrapper[4921]: I1210 12:57:12.250576 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:12Z","lastTransitionTime":"2025-12-10T12:57:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:12 crc kubenswrapper[4921]: I1210 12:57:12.352875 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:12 crc kubenswrapper[4921]: I1210 12:57:12.352918 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:12 crc kubenswrapper[4921]: I1210 12:57:12.352937 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:12 crc kubenswrapper[4921]: I1210 12:57:12.352956 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:12 crc kubenswrapper[4921]: I1210 12:57:12.352968 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:12Z","lastTransitionTime":"2025-12-10T12:57:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:12 crc kubenswrapper[4921]: I1210 12:57:12.423122 4921 generic.go:334] "Generic (PLEG): container finished" podID="709b4982-f2e6-4692-ab1a-c1d5b7d507ad" containerID="adf25ba213f519cad3c21233c0f3d2a383d978543da8ea1db41bb60dd29f9f3e" exitCode=0 Dec 10 12:57:12 crc kubenswrapper[4921]: I1210 12:57:12.423172 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-86bpd" event={"ID":"709b4982-f2e6-4692-ab1a-c1d5b7d507ad","Type":"ContainerDied","Data":"adf25ba213f519cad3c21233c0f3d2a383d978543da8ea1db41bb60dd29f9f3e"} Dec 10 12:57:12 crc kubenswrapper[4921]: I1210 12:57:12.434158 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-jskgz" event={"ID":"5ad003cc-9fcc-4fc6-86b9-247b30013c0a","Type":"ContainerStarted","Data":"a8c0dc3ea5672198c430f12ce59b7f2a66100fe52e0f7b4552deba97144250d7"} Dec 10 12:57:12 crc kubenswrapper[4921]: I1210 12:57:12.434220 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-jskgz" event={"ID":"5ad003cc-9fcc-4fc6-86b9-247b30013c0a","Type":"ContainerStarted","Data":"f59486e0b308deb6da2ad59b559269ddb737b9782c485cfdafea64838f34da89"} Dec 10 12:57:12 crc kubenswrapper[4921]: I1210 12:57:12.455983 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:12 crc kubenswrapper[4921]: I1210 12:57:12.456038 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:12 crc kubenswrapper[4921]: I1210 12:57:12.456049 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:12 crc kubenswrapper[4921]: I1210 12:57:12.456069 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:12 crc kubenswrapper[4921]: I1210 12:57:12.456086 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:12Z","lastTransitionTime":"2025-12-10T12:57:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in 
/etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:12 crc kubenswrapper[4921]: I1210 12:57:12.460288 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f57208b0-80bc-4c1b-bbab-9d2f858972f6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0534394a39803e8a7555e29d0770b5ac7f9197a5f0e03bec4c5460d77fffdd14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6eaca0cb438e61f0856ed7dc64256ccd02aee8dac014d1f5e9cd8aa180c736fb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://692a4c4828dc74b1bfb948f58fab96ee6674030cb9009c72f30f9eae482eb682\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\
"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f534d6390920d177e185001b28f7ece42d82a0da922b4aaf174c271dbe975c50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b39874b20cdccc7903753342421a1f7e13b7e99a2cb699a7c0e44226aebd4f4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"message\\\":\\\"et denominator for mutating requests\\\\\\\" limit=200\\\\nI1210 12:57:01.294872 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1210 12:57:01.294893 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1210 12:57:01.294918 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 12:57:01.294926 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 12:57:01.294932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1210 12:57:01.294934 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1210 12:57:01.294938 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1210 12:57:01.294941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1210 12:57:01.301734 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2446357718/tls.crt::/tmp/serving-cert-2446357718/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1765371405\\\\\\\\\\\\\\\" (2025-12-10 12:56:44 +0000 UTC to 2026-01-09 12:56:45 +0000 UTC (now=2025-12-10 12:57:01.30169166 +0000 UTC))\\\\\\\"\\\\nI1210 12:57:01.301889 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1765371416\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1765371416\\\\\\\\\\\\\\\" (2025-12-10 11:56:55 +0000 UTC to 2026-12-10 11:56:55 +0000 UTC (now=2025-12-10 12:57:01.301865574 +0000 UTC))\\\\\\\"\\\\nI1210 12:57:01.301907 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1210 12:57:01.301934 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nF1210 12:57:01.302850 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e5afbcb1ea81c3f9ec4152ef614a3f07ba1ded75c774c467e968f9c3ee72e33\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bee74fc4c681cc10c5a460c807659272e393e19173109e82ef65371c5b363ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bee74fc4c681cc10c5a460c807659272e393e19173109e82ef65371c5b363ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:12Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:12 crc kubenswrapper[4921]: I1210 12:57:12.483545 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:12Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:12 crc kubenswrapper[4921]: I1210 12:57:12.505865 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:12Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:12 crc kubenswrapper[4921]: I1210 12:57:12.525378 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"354355f7-6630-49a8-bdc5-5e875feecb7f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22c45fd7d4d0bb91e995e76a0d813660f9b488a4765e3a21eab2485e1ff03ff3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dbm9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27975eaa70887a1e6ec3bc21ce170bbe5dfe5a05172264be8c8bd343aea02998\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dbm9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-vn2n6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:12Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:12 crc kubenswrapper[4921]: I1210 12:57:12.537874 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bd245e67c99943297f64701eba8772143dc206caf67849eaf2f9a8e82dab0d26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:12Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:12 crc kubenswrapper[4921]: I1210 12:57:12.555030 4921 
status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jskgz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ad003cc-9fcc-4fc6-86b9-247b30013c0a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:11Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:11Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m875h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:11Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jskgz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:12Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:12 crc kubenswrapper[4921]: I1210 12:57:12.562066 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:12 crc kubenswrapper[4921]: I1210 12:57:12.562143 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:12 crc kubenswrapper[4921]: I1210 12:57:12.562154 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:12 crc kubenswrapper[4921]: I1210 12:57:12.562171 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:12 crc kubenswrapper[4921]: I1210 12:57:12.562187 4921 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:12Z","lastTransitionTime":"2025-12-10T12:57:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:12 crc kubenswrapper[4921]: I1210 12:57:12.580131 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50684108-04fc-405c-82be-d21d16cd650b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34932b230bb26e6c4b1bdf433827ce608df8658f6fb76140a4f0ac680dc1d43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://34932b230bb26e6c4b1bdf433827ce608df8658f6fb76140a4f0ac680dc1d43f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-m7n89\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:12Z 
is after 2025-08-24T17:21:41Z" Dec 10 12:57:12 crc kubenswrapper[4921]: I1210 12:57:12.602226 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2789e9cd1bca4abecf0939aad4a5f63bdc250a525ad3664bc2440e8b0b7a834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:12Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:12 crc kubenswrapper[4921]: I1210 12:57:12.622568 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zmks6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f2626c5-78df-45d2-8970-c4f99790a0fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d43ebe41a779225842dfa1c4d3be01575113b67ada9be07f553df1514e9dcf85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ft9kj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zmks6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:12Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:12 crc kubenswrapper[4921]: I1210 12:57:12.639111 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pqlx4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://480da3b2621712c4562f9423dc98fdbf17a9dc45365f129777611bc7e934c709\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lhs2m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pqlx4\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:12Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:12 crc kubenswrapper[4921]: I1210 12:57:12.657297 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://307b845aae3352df08e2f9fd394f4110a37b2a21650593ebb584c5bf37d01397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3be8a498516e12174c8b5612669fd69deef610c01ed9884a5228cd436bbae3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:12Z is after 
2025-08-24T17:21:41Z" Dec 10 12:57:12 crc kubenswrapper[4921]: I1210 12:57:12.667358 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:12 crc kubenswrapper[4921]: I1210 12:57:12.667437 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:12 crc kubenswrapper[4921]: I1210 12:57:12.667455 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:12 crc kubenswrapper[4921]: I1210 12:57:12.667470 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:12 crc kubenswrapper[4921]: I1210 12:57:12.667480 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:12Z","lastTransitionTime":"2025-12-10T12:57:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:12 crc kubenswrapper[4921]: I1210 12:57:12.678073 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-86bpd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"709b4982-f2e6-4692-ab1a-c1d5b7d507ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34d9e720fab0818e4cdf1e2a4da042a5648c7c396fedf17b395ad07ececd5c9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://34d9e720fab0818e4cdf1e2a4da042a5648c7c396fedf17b395ad07ececd5c9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04723fc3840c9d632dae527a5afa04fc7eea858426056da3dfe8e72186198ab1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://04723fc3840c9d632dae527a5afa04fc7eea858426056da3dfe8e72186198ab1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://adf25ba213f519cad3c21233c0f3d2a383d978543da8ea1db41bb60dd29f9f3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://adf25ba213f519cad3c21233c0f3d2a383d978543da8ea1db41bb60dd29f9f3e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/
cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-86bpd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:12Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:12 crc kubenswrapper[4921]: I1210 12:57:12.708200 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"371fafdc-aa16-4608-aaa2-e419c4ddbc18\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b9a190a657ca03f3fb08626b7af512164ff131b1783b903a02005a111a7036c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57991b0cb6fd4b37082ff5d4eecc6227d77f241e9a983cd3e0eb9db5b485865f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resourc
e-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c24d974446ee70bf587bf3969542cda98f062a9cc78b6af73005d9b8d0a6ee02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5a3f231014293fc0412e577cf9840f62f8db869ea4f0f8bef1bfc5112b38cf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://17a6158acd097054719316d2ad29dc036546d3951bb1e8dd010618f9155270a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://534968b5f5d9e7b3063c91a3e0b68ba04d83e2cb65ab688b23d284adc6852155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://534968b5f5d9e7b3063c91a3e0b68ba04d83e2cb65ab688b23d284adc6852155\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"20
25-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0acb3ca5fa3945c89412f466b00193354c94ce56dbba608c104d3baf555a2c3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0acb3ca5fa3945c89412f466b00193354c94ce56dbba608c104d3baf555a2c3a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b7d1b714acf0f278cc0310204225d417266a241f1ea827dc625f7b89a7d0ebac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b7d1b714acf0f278cc0310204225d417266a241f1ea827dc625f7b89a7d0ebac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:12Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:12 crc kubenswrapper[4921]: I1210 12:57:12.726263 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"02726135-3050-46a1-a3ab-b2ce46cdb75d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12292f0529bcf32fb33e5accfbd0dfd7d53e377a9ee2046d4ca6efc78fe1c31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a4716beddbcd24e8418830aa5494cffffc21272e45e30bd15cfe58bfc07c543\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f66fe2144cde40619405c04d7d83cbcc2e78503401df428502abad1682d4cb7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4244835c7f038a7c1bf4820de49854350a23fac13c5a252a1553f6508594f10e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:12Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:12 crc kubenswrapper[4921]: I1210 12:57:12.742979 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:12Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:12 crc kubenswrapper[4921]: I1210 12:57:12.761009 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2789e9cd1bca4abecf0939aad4a5f63bdc250a525ad3664bc2440e8b0b7a834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:12Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:12 crc kubenswrapper[4921]: I1210 12:57:12.770193 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:12 crc kubenswrapper[4921]: I1210 12:57:12.770231 4921 kubelet_node_status.go:724] "Recording 
event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:12 crc kubenswrapper[4921]: I1210 12:57:12.770239 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:12 crc kubenswrapper[4921]: I1210 12:57:12.770254 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:12 crc kubenswrapper[4921]: I1210 12:57:12.770263 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:12Z","lastTransitionTime":"2025-12-10T12:57:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:12 crc kubenswrapper[4921]: I1210 12:57:12.774677 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zmks6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f2626c5-78df-45d2-8970-c4f99790a0fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d43ebe41a779225842dfa1c4d3be01575113b67ada9be07f553df1514e9dcf85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ft9kj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zmks6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:12Z is after 
2025-08-24T17:21:41Z" Dec 10 12:57:12 crc kubenswrapper[4921]: I1210 12:57:12.791165 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pqlx4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://480da3b2621712c4562f9423dc98fdbf17a9dc45365f129777611bc7e934c709\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lhs2m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"pod
IPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pqlx4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:12Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:12 crc kubenswrapper[4921]: I1210 12:57:12.813717 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50684108-04fc-405c-82be-d21d16cd650b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34932b230bb26e6c4b1bdf433827ce608df8658f6fb76140a4f0ac680dc1d43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://34932b230bb26e6c4b1bdf433827ce608df8658f6fb76140a4f0ac680dc1d43f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-m7n89\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:12Z 
is after 2025-08-24T17:21:41Z" Dec 10 12:57:12 crc kubenswrapper[4921]: I1210 12:57:12.839065 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"371fafdc-aa16-4608-aaa2-e419c4ddbc18\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b9a190a657ca03f3fb08626b7af512164ff131b1783b903a02005a111a7036c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57991b0cb6fd4b37082ff5d4eecc6227d77f241e9a983cd3e0eb9db5b485865f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c24d974446ee70bf587bf3969542cda98f062a9cc78b6af73005d9b8d0a6ee02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"
/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5a3f231014293fc0412e577cf9840f62f8db869ea4f0f8bef1bfc5112b38cf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://17a6158acd097054719316d2ad29dc036546d3951bb1e8dd010618f9155270a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://534968b5f5d9e7b3063c91a3e0b68ba04d83e2cb65ab688b23d284adc6852155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://534968b5f5d9e7b3063c91a3e0b68ba04d83e2cb65ab688b23d284adc6852155\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0acb3ca5fa3945c89412f466b00193354c94ce56dbba608c104d3baf555a2c3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0acb3ca5fa3945c89412f466b00193354c94ce56dbba608c104d3baf555a2c3a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\"
:\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b7d1b714acf0f278cc0310204225d417266a241f1ea827dc625f7b89a7d0ebac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b7d1b714acf0f278cc0310204225d417266a241f1ea827dc625f7b89a7d0ebac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:12Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:12 crc kubenswrapper[4921]: I1210 12:57:12.855103 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"02726135-3050-46a1-a3ab-b2ce46cdb75d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12292f0529bcf32fb33e5accfbd0dfd7d53e377a9ee2046d4ca6efc78fe1c31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a4716beddbcd24e8418830aa5494cffffc21272e45e30bd15cfe58bfc07c543\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f66fe2144cde40619405c04d7d83cbcc2e78503401df428502abad1682d4cb7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4244835c7f038a7c1bf4820de49854350a23fac13c5a252a1553f6508594f10e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:12Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:12 crc kubenswrapper[4921]: I1210 12:57:12.871208 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:12Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:12 crc kubenswrapper[4921]: I1210 12:57:12.873549 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:12 crc kubenswrapper[4921]: I1210 12:57:12.873589 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:12 crc kubenswrapper[4921]: I1210 12:57:12.873610 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:12 crc kubenswrapper[4921]: I1210 12:57:12.873641 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:12 crc kubenswrapper[4921]: I1210 12:57:12.873666 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:12Z","lastTransitionTime":"2025-12-10T12:57:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:12 crc kubenswrapper[4921]: I1210 12:57:12.903115 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://307b845aae3352df08e2f9fd394f4110a37b2a21650593ebb584c5bf37d01397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3be8a498516e12174c8b5612669fd69deef610c01ed9884a5228cd436bbae3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:12Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:12 crc kubenswrapper[4921]: I1210 12:57:12.925270 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-86bpd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"709b4982-f2e6-4692-ab1a-c1d5b7d507ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34d9e720fab0818e4cdf1e2a4da042a5648c7c396fedf17b395ad07ececd5c9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://34d9e720fab0818e4cdf1e2a4da042a5648c7c396fedf17b395ad07ececd5c9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04723fc3840c9d632dae527a5afa04fc7eea858426056da3dfe8e72186198ab1\\\",\\\"image\\\":\\\"quay.io/open
shift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://04723fc3840c9d632dae527a5afa04fc7eea858426056da3dfe8e72186198ab1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://adf25ba213f519cad3c21233c0f3d2a383d978543da8ea1db41bb60dd29f9f3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://adf25ba213f519cad3c21233c0f3d2a383d978543da8ea1db41bb60dd29f9f3e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev
@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-86bpd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:12Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:12 crc kubenswrapper[4921]: I1210 12:57:12.941876 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f57208b0-80bc-4c1b-bbab-9d2f858972f6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0534394a39803e8a7555e29d0770b5ac7f9197a5f0e03bec4c5460d77fffdd14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6eaca0cb438e61f0856ed7dc64256ccd02aee8dac014d1f5e9cd8aa180c736fb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://692a4c4828dc74b1bfb948f58fab96ee6674030cb9009c72f30f9eae482eb682\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f534d6390920d177e185001b28f7ece42d82a0da922b4aaf174c271dbe975c50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b39874b20cdccc7903753342421a1f7e13b7e99a2cb699a7c0e44226aebd4f4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"message\\\":\\\"et denominator for mutating requests\\\\\\\" limit=200\\\\nI1210 12:57:01.294872 1 
secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1210 12:57:01.294893 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1210 12:57:01.294918 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 12:57:01.294926 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 12:57:01.294932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1210 12:57:01.294934 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1210 12:57:01.294938 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1210 12:57:01.294941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1210 12:57:01.301734 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2446357718/tls.crt::/tmp/serving-cert-2446357718/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1765371405\\\\\\\\\\\\\\\" (2025-12-10 12:56:44 +0000 UTC to 2026-01-09 12:56:45 +0000 UTC (now=2025-12-10 12:57:01.30169166 +0000 UTC))\\\\\\\"\\\\nI1210 12:57:01.301889 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1765371416\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1765371416\\\\\\\\\\\\\\\" (2025-12-10 11:56:55 +0000 UTC to 2026-12-10 11:56:55 +0000 UTC (now=2025-12-10 12:57:01.301865574 +0000 UTC))\\\\\\\"\\\\nI1210 12:57:01.301907 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1210 12:57:01.301934 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nF1210 12:57:01.302850 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e5afbcb1ea81c3f9ec4152ef614a3f07ba1ded75c774c467e968f9c3ee72e33\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bee74fc4c681cc10c5a460c807659272e393e19173109e82ef65371c5b363ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bee74fc4c681cc10c5a460c807659272e393e19173109e82ef65371c5b363ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:12Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:12 crc kubenswrapper[4921]: I1210 12:57:12.961718 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:12Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:12 crc kubenswrapper[4921]: I1210 12:57:12.973755 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:12Z is after 2025-08-24T17:21:41Z"
Dec 10 12:57:12 crc kubenswrapper[4921]: I1210 12:57:12.975806 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 12:57:12 crc kubenswrapper[4921]: I1210 12:57:12.975845 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 12:57:12 crc kubenswrapper[4921]: I1210 12:57:12.975860 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 12:57:12 crc kubenswrapper[4921]: I1210 12:57:12.975878 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 12:57:12 crc kubenswrapper[4921]: I1210 12:57:12.975890 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:12Z","lastTransitionTime":"2025-12-10T12:57:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 12:57:12 crc kubenswrapper[4921]: I1210 12:57:12.988197 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"354355f7-6630-49a8-bdc5-5e875feecb7f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22c45fd7d4d0bb91e995e76a0d813660f9b488a4765e3a21eab2485e1ff03ff3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dbm9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27975eaa70887a1e6ec3bc21ce170bbe5dfe5a05172264be8c8bd343aea02998\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dbm9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-vn2n6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:12Z is after 2025-08-24T17:21:41Z"
Dec 10 12:57:13 crc kubenswrapper[4921]: I1210 12:57:13.003658 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bd245e67c99943297f64701eba8772143dc206caf67849eaf2f9a8e82dab0d26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:13Z is after 2025-08-24T17:21:41Z"
Dec 10 12:57:13 crc kubenswrapper[4921]: I1210 12:57:13.014724 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jskgz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ad003cc-9fcc-4fc6-86b9-247b30013c0a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8c0dc3ea5672198c430f12ce59b7f2a66100fe52e0f7b4552deba97144250d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m875h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:11Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jskgz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:13Z is after 2025-08-24T17:21:41Z"
Dec 10 12:57:13 crc kubenswrapper[4921]: I1210 12:57:13.078378 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 12:57:13 crc kubenswrapper[4921]: I1210 12:57:13.078436 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 12:57:13 crc kubenswrapper[4921]: I1210 12:57:13.078445 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 12:57:13 crc kubenswrapper[4921]: I1210 12:57:13.078464 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 12:57:13 crc kubenswrapper[4921]: I1210 12:57:13.078473 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:13Z","lastTransitionTime":"2025-12-10T12:57:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 12:57:13 crc kubenswrapper[4921]: I1210 12:57:13.180701 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 12:57:13 crc kubenswrapper[4921]: I1210 12:57:13.180755 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 12:57:13 crc kubenswrapper[4921]: I1210 12:57:13.180778 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 12:57:13 crc kubenswrapper[4921]: I1210 12:57:13.180798 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 12:57:13 crc kubenswrapper[4921]: I1210 12:57:13.180811 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:13Z","lastTransitionTime":"2025-12-10T12:57:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 12:57:13 crc kubenswrapper[4921]: I1210 12:57:13.208818 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jskgz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ad003cc-9fcc-4fc6-86b9-247b30013c0a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8c0dc3ea5672198c430f12ce59b7f2a66100fe52e0f7b4552deba97144250d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m875h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:11Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jskgz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:13Z is after 2025-08-24T17:21:41Z"
Dec 10 12:57:13 crc kubenswrapper[4921]: I1210 12:57:13.222227 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bd245e67c99943297f64701eba8772143dc206caf67849eaf2f9a8e82dab0d26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:13Z is after 2025-08-24T17:21:41Z"
Dec 10 12:57:13 crc kubenswrapper[4921]: I1210 12:57:13.241839 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2789e9cd1bca4abecf0939aad4a5f63bdc250a525ad3664bc2440e8b0b7a834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:13Z is after 2025-08-24T17:21:41Z"
Dec 10 12:57:13 crc kubenswrapper[4921]: I1210 12:57:13.254048 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zmks6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f2626c5-78df-45d2-8970-c4f99790a0fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d43ebe41a779225842dfa1c4d3be01575113b67ada9be07f553df1514e9dcf85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ft9kj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zmks6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:13Z is after 2025-08-24T17:21:41Z"
Dec 10 12:57:13 crc kubenswrapper[4921]: I1210 12:57:13.279107 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pqlx4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://480da3b2621712c4562f9423dc98fdbf17a9dc45365f129777611bc7e934c709\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lhs2m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pqlx4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:13Z is after 2025-08-24T17:21:41Z"
Dec 10 12:57:13 crc kubenswrapper[4921]: I1210 12:57:13.282895 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 12:57:13 crc kubenswrapper[4921]: I1210 12:57:13.282930 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 12:57:13 crc kubenswrapper[4921]: I1210 12:57:13.282944 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 12:57:13 crc kubenswrapper[4921]: I1210 12:57:13.282965 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 12:57:13 crc kubenswrapper[4921]: I1210 12:57:13.282981 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:13Z","lastTransitionTime":"2025-12-10T12:57:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 12:57:13 crc kubenswrapper[4921]: I1210 12:57:13.307687 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50684108-04fc-405c-82be-d21d16cd650b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34932b230bb26e6c4b1bdf433827ce608df8658f6fb76140a4f0ac680dc1d43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://34932b230bb26e6c4b1bdf433827ce608df8658f6fb76140a4f0ac680dc1d43f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-m7n89\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:13Z is after 2025-08-24T17:21:41Z"
Dec 10 12:57:13 crc kubenswrapper[4921]: I1210 12:57:13.333637 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"371fafdc-aa16-4608-aaa2-e419c4ddbc18\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b9a190a657ca03f3fb08626b7af512164ff131b1783b903a02005a111a7036c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57991b0cb6fd4b37082ff5d4eecc6227d77f241e9a983cd3e0eb9db5b485865f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c24d974446ee70bf587bf3969542cda98f062a9cc78b6af73005d9b8d0a6ee02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5a3f231014293fc0412e577cf9840f62f8db869ea4f0f8bef1bfc5112b38cf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://17a6158acd097054719316d2ad29dc036546d3951bb1e8dd010618f9155270a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://534968b5f5d9e7b3063c91a3e0b68ba04d83e2cb65ab688b23d284adc6852155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://534968b5f5d9e7b3063c91a3e0b68ba04d83e2cb65ab688b23d284adc6852155\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0acb3ca5fa3945c89412f466b00193354c94ce56dbba608c104d3baf555a2c3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0acb3ca5fa3945c89412f466b00193354c94ce56dbba608c104d3baf555a2c3a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b7d1b714acf0f278cc0310204225d417266a241f1ea827dc625f7b89a7d0ebac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b7d1b714acf0f278cc0310204225d417266a241f1ea827dc625f7b89a7d0ebac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:13Z is after 2025-08-24T17:21:41Z"
Dec 10 12:57:13 crc kubenswrapper[4921]: I1210 12:57:13.349891 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02726135-3050-46a1-a3ab-b2ce46cdb75d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12292f0529bcf32fb33e5accfbd0dfd7d53e377a9ee2046d4ca6efc78fe1c31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a4716beddbcd24e8418830aa5494cffffc21272e45e30bd15cfe58bfc07c543\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f66fe2144cde40619405c04d7d83cbcc2e78503401df428502abad1682d4cb7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4244835c7f038a7c1bf4820de49854350a23fac13c5a252a1553f6508594f10e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:13Z is after 2025-08-24T17:21:41Z"
Dec 10 12:57:13 crc kubenswrapper[4921]: I1210 12:57:13.368013 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:13Z is after 2025-08-24T17:21:41Z"
Dec 10 12:57:13 crc kubenswrapper[4921]: I1210 12:57:13.386466 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 12:57:13 crc kubenswrapper[4921]: I1210 12:57:13.386524 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 12:57:13 crc kubenswrapper[4921]: I1210 12:57:13.386539 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 12:57:13 crc kubenswrapper[4921]: I1210 12:57:13.386560 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 12:57:13 crc kubenswrapper[4921]: I1210 12:57:13.386577 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:13Z","lastTransitionTime":"2025-12-10T12:57:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 12:57:13 crc kubenswrapper[4921]: I1210 12:57:13.391034 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://307b845aae3352df08e2f9fd394f4110a37b2a21650593ebb584c5bf37d01397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3be8a498516e12174c8b5612669fd69deef610c01ed9884a5228cd436bbae3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:13Z is after 2025-08-24T17:21:41Z"
Dec 10 12:57:13 crc kubenswrapper[4921]: I1210 12:57:13.433500 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-86bpd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"709b4982-f2e6-4692-ab1a-c1d5b7d507ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34d9e720fab0818e4cdf1e2a4da042a5648c7c396fedf17b395ad07ececd5c9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://34d9e720fab0818e4cdf1e2a4da042a5648c7c396fedf17b395ad07ececd5c9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04723fc3840c9d632dae527a5afa04fc7eea858426056da3dfe8e72186198ab1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://04723fc3840c9d632dae527a5afa04fc7eea858426056da3dfe8e72186198ab1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://adf25ba213f519cad3c21233c0f3d2a383d978543da8ea1db41bb60dd29f9f3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://adf25ba213f519cad3c21233c0f3d2a383d978543da8ea1db41bb60dd29f9f3e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev
@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-86bpd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:13Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:13 crc kubenswrapper[4921]: I1210 12:57:13.442407 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" event={"ID":"50684108-04fc-405c-82be-d21d16cd650b","Type":"ContainerStarted","Data":"dd0025f5be6e68aba73c349dd732281dead920b7d8c2d307b4a67cfdafb99119"} Dec 10 12:57:13 crc kubenswrapper[4921]: I1210 12:57:13.445591 4921 generic.go:334] "Generic (PLEG): container finished" podID="709b4982-f2e6-4692-ab1a-c1d5b7d507ad" containerID="baa63608618bc4f059414317df70f14a33321d5aed291adc02a9daac92cf5428" exitCode=0 Dec 10 12:57:13 crc kubenswrapper[4921]: I1210 12:57:13.445631 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-86bpd" event={"ID":"709b4982-f2e6-4692-ab1a-c1d5b7d507ad","Type":"ContainerDied","Data":"baa63608618bc4f059414317df70f14a33321d5aed291adc02a9daac92cf5428"} Dec 10 12:57:13 crc kubenswrapper[4921]: I1210 12:57:13.474678 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:13Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:13 crc kubenswrapper[4921]: I1210 12:57:13.489874 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:13 crc kubenswrapper[4921]: I1210 12:57:13.490126 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:13 crc kubenswrapper[4921]: I1210 12:57:13.490254 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:13 crc kubenswrapper[4921]: I1210 12:57:13.490364 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:13 crc kubenswrapper[4921]: I1210 12:57:13.490513 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:13Z","lastTransitionTime":"2025-12-10T12:57:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:13 crc kubenswrapper[4921]: I1210 12:57:13.513947 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:13Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:13 crc kubenswrapper[4921]: I1210 12:57:13.553447 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"354355f7-6630-49a8-bdc5-5e875feecb7f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22c45fd7d4d0bb91e995e76a0d813660f9b488a4765e3a21eab2485e1ff03ff3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dbm9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27975eaa70887a1e6ec3bc21ce170bbe5dfe5a05172264be8c8bd343aea02998\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dbm9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-vn2n6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:13Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:13 crc kubenswrapper[4921]: I1210 12:57:13.590585 4921 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f57208b0-80bc-4c1b-bbab-9d2f858972f6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0534394a39803e8a7555e29d0770b5ac7f9197a5f0e03bec4c5460d77fffdd14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6eaca0cb438e61f0856ed7dc64256ccd02aee8dac014d1f5e9cd8aa180c736fb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://692a4c4828dc74b1bfb948f58fab96ee6674030cb9009c72f30f9eae482eb682\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f534d6390920d177e185001b28f7ece42d82a0da922b4aaf174c271dbe975c50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b39874b20cdccc7903753342421a1f7e13b7e99a2cb699a7c0e44226aebd4f4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"message\\\":\\\"et denominator for mutating requests\\\\\\\" limit=200\\\\nI1210 12:57:01.294872 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1210 12:57:01.294893 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1210 12:57:01.294918 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 12:57:01.294926 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 12:57:01.294932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1210 12:57:01.294934 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1210 12:57:01.294938 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1210 12:57:01.294941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1210 12:57:01.301734 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2446357718/tls.crt::/tmp/serving-cert-2446357718/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1765371405\\\\\\\\\\\\\\\" (2025-12-10 12:56:44 +0000 UTC to 2026-01-09 12:56:45 +0000 UTC (now=2025-12-10 12:57:01.30169166 +0000 UTC))\\\\\\\"\\\\nI1210 12:57:01.301889 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1765371416\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1765371416\\\\\\\\\\\\\\\" (2025-12-10 11:56:55 +0000 UTC to 2026-12-10 11:56:55 +0000 UTC (now=2025-12-10 12:57:01.301865574 +0000 UTC))\\\\\\\"\\\\nI1210 12:57:01.301907 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1210 12:57:01.301934 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nF1210 12:57:01.302850 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e5afbcb1ea81c3f9ec4152ef614a3f07ba1ded75c774c467e968f9c3ee72e33\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bee74fc4c681cc10c5a460c807659272e393e19173109e82ef65371c5b363ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bee74fc4c681cc10c5a460c807659272e393e19173109e82ef65371c5b363ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:13Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:13 crc kubenswrapper[4921]: I1210 12:57:13.592344 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:13 crc kubenswrapper[4921]: I1210 12:57:13.592372 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:13 crc kubenswrapper[4921]: I1210 12:57:13.592399 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:13 crc kubenswrapper[4921]: I1210 12:57:13.592415 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:13 crc kubenswrapper[4921]: I1210 12:57:13.592426 4921 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:13Z","lastTransitionTime":"2025-12-10T12:57:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:13 crc kubenswrapper[4921]: I1210 12:57:13.632263 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://307b845aae3352df08e2f9fd394f4110a37b2a21650593ebb584c5bf37d01397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3be8a498516e12174c8b5612669fd69deef610c01ed9884a5228cd436bbae3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:13Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:13 crc kubenswrapper[4921]: I1210 12:57:13.677236 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-86bpd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"709b4982-f2e6-4692-ab1a-c1d5b7d507ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34d9e720fab0818e4cdf1e2a4da042a5648c7c396fedf17b395ad07ececd5c9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://34d9e720fab0818e4cdf1e2a4da042a5648c7c396fedf17b395ad07ececd5c9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disable
d\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04723fc3840c9d632dae527a5afa04fc7eea858426056da3dfe8e72186198ab1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://04723fc3840c9d632dae527a5afa04fc7eea858426056da3dfe8e72186198ab1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://adf25ba213f519cad3c21233c0f3d2a383d978543da8ea1db41bb60dd29f9f3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://adf25ba213f519cad3c21233c0f3d2a383d978543da8ea1db41bb60dd29f9f3e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://baa63608618bc4f059414317df70f14a33321d5aed291adc02a9daac92cf5428\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baa63608618bc4f059414317df70f14a3
3321d5aed291adc02a9daac92cf5428\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-86bpd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:13Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:13 crc kubenswrapper[4921]: I1210 12:57:13.694958 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:13 crc kubenswrapper[4921]: I1210 12:57:13.694992 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:13 crc kubenswrapper[4921]: I1210 12:57:13.695004 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:13 crc kubenswrapper[4921]: I1210 12:57:13.695024 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:13 crc kubenswrapper[4921]: I1210 
12:57:13.695040 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:13Z","lastTransitionTime":"2025-12-10T12:57:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:13 crc kubenswrapper[4921]: I1210 12:57:13.722987 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"371fafdc-aa16-4608-aaa2-e419c4ddbc18\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b9a190a657ca03f3fb08626b7af512164ff131b1783b903a02005a111a7036c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57991b0cb6fd4b37082ff5d4eecc6227d77f241e9a983cd3e0eb9db5b485865f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c24d974446ee70bf587bf3969542cda98f062a9cc78b6af73005d9b8d0a6ee02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f4
2928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5a3f231014293fc0412e577cf9840f62f8db869ea4f0f8bef1bfc5112b38cf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://17a6158acd097054719316d2ad29dc036546d3951bb1e8dd010618f9155270a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://534968b5f5d9e7b3063c91a3e0b68ba04d83e2cb65ab688b23d284adc6852155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://534968b5f5d9e7b3063c91a3e0b68ba04d83e2cb65ab688b23d284adc6852155\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0acb3ca5fa3945c89412f466b00193354c94ce56dbba608c104d3baf555a2c3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\
\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0acb3ca5fa3945c89412f466b00193354c94ce56dbba608c104d3baf555a2c3a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b7d1b714acf0f278cc0310204225d417266a241f1ea827dc625f7b89a7d0ebac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b7d1b714acf0f278cc0310204225d417266a241f1ea827dc625f7b89a7d0ebac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:13Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:13 crc kubenswrapper[4921]: I1210 12:57:13.759372 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"02726135-3050-46a1-a3ab-b2ce46cdb75d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12292f0529bcf32fb33e5accfbd0dfd7d53e377a9ee2046d4ca6efc78fe1c31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a4716beddbcd24e8418830aa5494cffffc21272e45e30bd15cfe58bfc07c543\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f66fe2144cde40619405c04d7d83cbcc2e78503401df428502abad1682d4cb7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4244835c7f038a7c1bf4820de49854350a23fac13c5a252a1553f6508594f10e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:13Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:13 crc kubenswrapper[4921]: I1210 12:57:13.789114 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:13Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:13 crc kubenswrapper[4921]: I1210 12:57:13.797856 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:13 crc kubenswrapper[4921]: I1210 12:57:13.797913 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:13 crc kubenswrapper[4921]: I1210 12:57:13.797933 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:13 crc kubenswrapper[4921]: I1210 12:57:13.797957 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:13 crc kubenswrapper[4921]: I1210 12:57:13.797972 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:13Z","lastTransitionTime":"2025-12-10T12:57:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:13 crc kubenswrapper[4921]: I1210 12:57:13.830622 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f57208b0-80bc-4c1b-bbab-9d2f858972f6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0534394a39803e8a7555e29d0770b5ac7f9197a5f0e03bec4c5460d77fffdd14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6eaca0cb438e61f0856ed7dc64256ccd02aee8dac014d1f5e9cd8aa180c736fb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://692a4c4828dc74b1bfb948f58fab96ee6674030cb9009c72f30f9eae482eb682\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f534d6390920d177e185001b28f7ece42d82a0da922b4aaf174c271dbe975c50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b39874b20cdccc7903753342421a1f7e13b7e99a2cb699a7c0e44226aebd4f4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"message\\\":\\\"et denominator for mutating requests\\\\\\\" limit=200\\\\nI1210 12:57:01.294872 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1210 12:57:01.294893 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1210 12:57:01.294918 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 12:57:01.294926 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 12:57:01.294932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1210 12:57:01.294934 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1210 12:57:01.294938 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1210 12:57:01.294941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1210 12:57:01.301734 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2446357718/tls.crt::/tmp/serving-cert-2446357718/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1765371405\\\\\\\\\\\\\\\" (2025-12-10 12:56:44 +0000 UTC to 2026-01-09 12:56:45 +0000 UTC (now=2025-12-10 12:57:01.30169166 +0000 UTC))\\\\\\\"\\\\nI1210 12:57:01.301889 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1765371416\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1765371416\\\\\\\\\\\\\\\" (2025-12-10 11:56:55 +0000 UTC to 2026-12-10 11:56:55 +0000 UTC (now=2025-12-10 12:57:01.301865574 +0000 UTC))\\\\\\\"\\\\nI1210 12:57:01.301907 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1210 12:57:01.301934 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nF1210 12:57:01.302850 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e5afbcb1ea81c3f9ec4152ef614a3f07ba1ded75c774c467e968f9c3ee72e33\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bee74fc4c681cc10c5a460c807659272e393e19173109e82ef65371c5b363ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bee74fc4c681cc10c5a460c807659272e393e19173109e82ef65371c5b363ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:13Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:13 crc kubenswrapper[4921]: I1210 12:57:13.875401 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:13Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:13 crc kubenswrapper[4921]: I1210 12:57:13.901605 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:13 crc kubenswrapper[4921]: I1210 12:57:13.901671 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:13 crc kubenswrapper[4921]: I1210 12:57:13.901684 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:13 crc kubenswrapper[4921]: I1210 12:57:13.901705 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:13 crc kubenswrapper[4921]: I1210 12:57:13.901721 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:13Z","lastTransitionTime":"2025-12-10T12:57:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:13 crc kubenswrapper[4921]: I1210 12:57:13.912643 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:13Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:13 crc kubenswrapper[4921]: I1210 12:57:13.948729 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"354355f7-6630-49a8-bdc5-5e875feecb7f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22c45fd7d4d0bb91e995e76a0d813660f9b488a4765e3a21eab2485e1ff03ff3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dbm9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27975eaa70887a1e6ec3bc21ce170bbe5dfe5a05172264be8c8bd343aea02998\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dbm9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-vn2n6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:13Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:13 crc kubenswrapper[4921]: I1210 12:57:13.993329 4921 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bd245e67c99943297f64701eba8772143dc206caf67849eaf2f9a8e82dab0d26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:13Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:14 crc kubenswrapper[4921]: I1210 12:57:14.004089 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:14 crc kubenswrapper[4921]: I1210 12:57:14.004143 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:14 crc kubenswrapper[4921]: I1210 12:57:14.004155 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:14 crc kubenswrapper[4921]: I1210 12:57:14.004173 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:14 crc kubenswrapper[4921]: I1210 12:57:14.004186 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:14Z","lastTransitionTime":"2025-12-10T12:57:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:14 crc kubenswrapper[4921]: I1210 12:57:14.032006 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jskgz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ad003cc-9fcc-4fc6-86b9-247b30013c0a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8c0dc3ea5672198c430f12ce59b7f2a66100fe52e0f7b4552deba97144250d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m875h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:11Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jskgz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:14Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:14 crc kubenswrapper[4921]: I1210 12:57:14.080261 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"50684108-04fc-405c-82be-d21d16cd650b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34932b230bb26e6c4b1bdf433827ce608df8658f6fb76140a4f0ac680dc1d43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://34932b230bb26e6c4b1bdf433827ce608df8658f6fb76140a4f0ac680dc1d43f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-m7n89\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:14Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:14 crc kubenswrapper[4921]: I1210 12:57:14.111966 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:14 crc kubenswrapper[4921]: I1210 12:57:14.112433 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:14 crc kubenswrapper[4921]: I1210 12:57:14.112518 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:14 crc kubenswrapper[4921]: I1210 12:57:14.112590 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:14 crc kubenswrapper[4921]: I1210 12:57:14.112650 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:14Z","lastTransitionTime":"2025-12-10T12:57:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:14 crc kubenswrapper[4921]: I1210 12:57:14.113436 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2789e9cd1bca4abecf0939aad4a5f63bdc250a525ad3664bc2440e8b0b7a834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:14Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:14 crc kubenswrapper[4921]: I1210 12:57:14.147768 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zmks6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f2626c5-78df-45d2-8970-c4f99790a0fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d43ebe41a779225842dfa1c4d3be01575113b67ada9be07f553df1514e9dcf85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ft9kj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zmks6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:14Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:14 crc kubenswrapper[4921]: I1210 12:57:14.191814 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 12:57:14 crc kubenswrapper[4921]: I1210 12:57:14.192033 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 12:57:14 crc kubenswrapper[4921]: I1210 12:57:14.191902 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 12:57:14 crc kubenswrapper[4921]: E1210 12:57:14.192236 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 12:57:14 crc kubenswrapper[4921]: E1210 12:57:14.192357 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 12:57:14 crc kubenswrapper[4921]: E1210 12:57:14.192417 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 12:57:14 crc kubenswrapper[4921]: I1210 12:57:14.193128 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pqlx4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://480da3b2621712c4562f9423dc98fdbf17a9dc45365f129777611bc7e934c709\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\
\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lhs2m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pqlx4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:14Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:14 crc kubenswrapper[4921]: I1210 12:57:14.214879 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:14 crc kubenswrapper[4921]: I1210 12:57:14.214921 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:14 crc kubenswrapper[4921]: I1210 12:57:14.214931 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:14 crc kubenswrapper[4921]: I1210 12:57:14.214946 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:14 crc kubenswrapper[4921]: I1210 12:57:14.214957 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:14Z","lastTransitionTime":"2025-12-10T12:57:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:14 crc kubenswrapper[4921]: I1210 12:57:14.317210 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:14 crc kubenswrapper[4921]: I1210 12:57:14.317275 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:14 crc kubenswrapper[4921]: I1210 12:57:14.317288 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:14 crc kubenswrapper[4921]: I1210 12:57:14.317306 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:14 crc kubenswrapper[4921]: I1210 12:57:14.317318 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:14Z","lastTransitionTime":"2025-12-10T12:57:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:14 crc kubenswrapper[4921]: I1210 12:57:14.420767 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:14 crc kubenswrapper[4921]: I1210 12:57:14.420809 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:14 crc kubenswrapper[4921]: I1210 12:57:14.420820 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:14 crc kubenswrapper[4921]: I1210 12:57:14.420837 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:14 crc kubenswrapper[4921]: I1210 12:57:14.420848 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:14Z","lastTransitionTime":"2025-12-10T12:57:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:14 crc kubenswrapper[4921]: I1210 12:57:14.453521 4921 generic.go:334] "Generic (PLEG): container finished" podID="709b4982-f2e6-4692-ab1a-c1d5b7d507ad" containerID="0e54a218f367591b87841a39399d5889344b8b92fcc70d77105a0191d3dba37c" exitCode=0 Dec 10 12:57:14 crc kubenswrapper[4921]: I1210 12:57:14.453578 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-86bpd" event={"ID":"709b4982-f2e6-4692-ab1a-c1d5b7d507ad","Type":"ContainerDied","Data":"0e54a218f367591b87841a39399d5889344b8b92fcc70d77105a0191d3dba37c"} Dec 10 12:57:14 crc kubenswrapper[4921]: I1210 12:57:14.475157 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pqlx4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://480da3b2621712c4562f9423dc98fdbf17a9dc45365f129777611bc7e934c709\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-conf
ig\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lhs2m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pqlx4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:14Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:14 crc kubenswrapper[4921]: I1210 12:57:14.513179 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50684108-04fc-405c-82be-d21d16cd650b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34932b230bb26e6c4b1bdf433827ce608df8658f6fb76140a4f0ac680dc1d43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://34932b230bb26e6c4b1bdf433827ce608df8658f6fb76140a4f0ac680dc1d43f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-m7n89\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:14Z 
is after 2025-08-24T17:21:41Z" Dec 10 12:57:14 crc kubenswrapper[4921]: I1210 12:57:14.524126 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:14 crc kubenswrapper[4921]: I1210 12:57:14.524156 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:14 crc kubenswrapper[4921]: I1210 12:57:14.524165 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:14 crc kubenswrapper[4921]: I1210 12:57:14.524182 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:14 crc kubenswrapper[4921]: I1210 12:57:14.524191 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:14Z","lastTransitionTime":"2025-12-10T12:57:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:14 crc kubenswrapper[4921]: I1210 12:57:14.529415 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2789e9cd1bca4abecf0939aad4a5f63bdc250a525ad3664bc2440e8b0b7a834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:14Z is after 2025-08-24T17:21:41Z" Dec 10 
12:57:14 crc kubenswrapper[4921]: I1210 12:57:14.544176 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zmks6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f2626c5-78df-45d2-8970-c4f99790a0fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d43ebe41a779225842dfa1c4d3be01575113b67ada9be07f553df1514e9dcf85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ft9kj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zmks6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:14Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:14 crc kubenswrapper[4921]: I1210 12:57:14.559597 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:14Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:14 crc kubenswrapper[4921]: I1210 12:57:14.575192 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://307b845aae3352df08e2f9fd394f4110a37b2a21650593ebb584c5bf37d01397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3be8a498516e12174c8b5612669fd69deef610c01ed9884a5228cd436bbae3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:14Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:14 crc kubenswrapper[4921]: I1210 12:57:14.592967 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-86bpd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"709b4982-f2e6-4692-ab1a-c1d5b7d507ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34d9e720fab0818e4cdf1e2a4da042a5648c7c396fedf17b395ad07ececd5c9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://34d9e720fab0818e4cdf1e2a4da042a5648c7c396fedf17b395ad07ececd5c9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04723fc3840c9d632dae527a5afa04fc7eea858426056da3dfe8e72186198ab1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:
687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://04723fc3840c9d632dae527a5afa04fc7eea858426056da3dfe8e72186198ab1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://adf25ba213f519cad3c21233c0f3d2a383d978543da8ea1db41bb60dd29f9f3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://adf25ba213f519cad3c21233c0f3d2a383d978543da8ea1db41bb60dd29f9f3e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://baa63608618bc4f059414317df70f14a33321d5aed291adc02a9daac92cf5428\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baa63608618bc4f059414317df70f14a33321d5aed291adc02a9daac92cf5428\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mo
untPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e54a218f367591b87841a39399d5889344b8b92fcc70d77105a0191d3dba37c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e54a218f367591b87841a39399d5889344b8b92fcc70d77105a0191d3dba37c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-86bpd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:14Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:14 crc kubenswrapper[4921]: I1210 12:57:14.618817 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"371fafdc-aa16-4608-aaa2-e419c4ddbc18\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b9a190a657ca03f3fb08626b7af512164ff131b1783b903a02005a111a7036c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57991b0cb6fd4b37082ff5d4eecc6227d77f241e9a983cd3e0eb9db5b485865f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c24d974446ee70bf587bf3969542cda98f062a9cc78b6af73005d9b8d0a6ee02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5a3f231014293fc0412e577cf9840f62f8db86
9ea4f0f8bef1bfc5112b38cf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://17a6158acd097054719316d2ad29dc036546d3951bb1e8dd010618f9155270a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://534968b5f5d9e7b3063c91a3e0b68ba04d83e2cb65ab688b23d284adc6852155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://534968b5f5d9e7b3063c91a3e0b68ba04d83e2cb65ab688b23d284adc6852155\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0acb3ca5fa3945c89412f466b00193354c94ce56dbba608c104d3baf555a2c3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0acb3ca5fa3945c89412f466b00193354c94ce56dbba608c104d3baf555a2c3a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b7d1b714acf0f278cc0310204225d417266a241f1ea827dc625f7b89a7d0ebac\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b7d1b714acf0f278cc0310204225d417266a241f1ea827dc625f7b89a7d0ebac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:14Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:14 crc kubenswrapper[4921]: I1210 12:57:14.626740 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:14 crc kubenswrapper[4921]: I1210 12:57:14.626767 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:14 crc kubenswrapper[4921]: I1210 12:57:14.626776 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:14 crc kubenswrapper[4921]: I1210 12:57:14.626808 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:14 crc kubenswrapper[4921]: I1210 12:57:14.626816 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:14Z","lastTransitionTime":"2025-12-10T12:57:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:14 crc kubenswrapper[4921]: I1210 12:57:14.636125 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02726135-3050-46a1-a3ab-b2ce46cdb75d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12292f0529bcf32fb33e5accfbd0dfd7d53e377a9ee2046d4ca6efc78fe1c31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a4716beddbcd24e8418830aa5494cffffc21272e45e30bd15cfe58bfc07c543\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f66fe2144cde40619405c04d7d83cbcc2e78503401df428502abad1682d4cb7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4244835c7f038a7c1bf4820de49854350a23fac13c5a252a1553f6508594f10e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:14Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:14 crc kubenswrapper[4921]: I1210 12:57:14.651533 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"354355f7-6630-49a8-bdc5-5e875feecb7f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22c45fd7d4d0bb91e995e76a0d813660f9b488a4765e3a21eab2485e1ff03ff3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dbm9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27975eaa70887a1e6ec3bc21ce170bbe5dfe5a05172264be8c8bd343aea02998\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dbm9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-vn2n6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:14Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:14 crc kubenswrapper[4921]: I1210 12:57:14.667590 4921 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f57208b0-80bc-4c1b-bbab-9d2f858972f6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0534394a39803e8a7555e29d0770b5ac7f9197a5f0e03bec4c5460d77fffdd14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6eaca0cb438e61f0856ed7dc64256ccd02aee8dac014d1f5e9cd8aa180c736fb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://692a4c4828dc74b1bfb948f58fab96ee6674030cb9009c72f30f9eae482eb682\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f534d6390920d177e185001b28f7ece42d82a0da922b4aaf174c271dbe975c50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b39874b20cdccc7903753342421a1f7e13b7e99a2cb699a7c0e44226aebd4f4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"message\\\":\\\"et denominator for mutating requests\\\\\\\" limit=200\\\\nI1210 12:57:01.294872 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1210 12:57:01.294893 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1210 12:57:01.294918 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 12:57:01.294926 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 12:57:01.294932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1210 12:57:01.294934 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1210 12:57:01.294938 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1210 12:57:01.294941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1210 12:57:01.301734 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2446357718/tls.crt::/tmp/serving-cert-2446357718/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1765371405\\\\\\\\\\\\\\\" (2025-12-10 12:56:44 +0000 UTC to 2026-01-09 12:56:45 +0000 UTC (now=2025-12-10 12:57:01.30169166 +0000 UTC))\\\\\\\"\\\\nI1210 12:57:01.301889 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1765371416\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1765371416\\\\\\\\\\\\\\\" (2025-12-10 11:56:55 +0000 UTC to 2026-12-10 11:56:55 +0000 UTC (now=2025-12-10 12:57:01.301865574 +0000 UTC))\\\\\\\"\\\\nI1210 12:57:01.301907 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1210 12:57:01.301934 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nF1210 12:57:01.302850 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e5afbcb1ea81c3f9ec4152ef614a3f07ba1ded75c774c467e968f9c3ee72e33\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bee74fc4c681cc10c5a460c807659272e393e19173109e82ef65371c5b363ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bee74fc4c681cc10c5a460c807659272e393e19173109e82ef65371c5b363ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:14Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:14 crc kubenswrapper[4921]: I1210 12:57:14.685138 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:14Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:14 crc kubenswrapper[4921]: I1210 12:57:14.708567 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:14Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:14 crc kubenswrapper[4921]: I1210 12:57:14.731478 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:14 crc kubenswrapper[4921]: I1210 12:57:14.731549 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:14 crc kubenswrapper[4921]: I1210 12:57:14.731562 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:14 crc kubenswrapper[4921]: I1210 12:57:14.731603 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:14 crc kubenswrapper[4921]: I1210 12:57:14.731617 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:14Z","lastTransitionTime":"2025-12-10T12:57:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:14 crc kubenswrapper[4921]: I1210 12:57:14.749685 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bd245e67c99943297f64701eba8772143dc206caf67849eaf2f9a8e82dab0d26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:14Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:14 crc kubenswrapper[4921]: I1210 12:57:14.763726 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:14 crc kubenswrapper[4921]: I1210 12:57:14.763786 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:14 crc kubenswrapper[4921]: I1210 12:57:14.763798 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:14 crc kubenswrapper[4921]: I1210 12:57:14.763838 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:14 crc kubenswrapper[4921]: I1210 12:57:14.763851 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:14Z","lastTransitionTime":"2025-12-10T12:57:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:14 crc kubenswrapper[4921]: E1210 12:57:14.779078 4921 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:14Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:14Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"aa6d129a-c0be-471d-913f-2184d68fb040\\\",\\\"systemUUID\\\":\\\"539c9d38-f260-4af7-b6c3-f4170bf93c3e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:14Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:14 crc kubenswrapper[4921]: I1210 12:57:14.782866 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:14 crc kubenswrapper[4921]: I1210 12:57:14.782889 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 10 12:57:14 crc kubenswrapper[4921]: I1210 12:57:14.782898 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:14 crc kubenswrapper[4921]: I1210 12:57:14.782913 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:14 crc kubenswrapper[4921]: I1210 12:57:14.782923 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:14Z","lastTransitionTime":"2025-12-10T12:57:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:14 crc kubenswrapper[4921]: I1210 12:57:14.789113 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jskgz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ad003cc-9fcc-4fc6-86b9-247b30013c0a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8c0dc3ea5672198c430f12ce59b7f2a66100fe52e0f7b4552deba97144250d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m875h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:11Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jskgz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-10T12:57:14Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:14 crc kubenswrapper[4921]: E1210 12:57:14.799534 4921 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:14Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:14Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"aa6d129a-c0be-471d-913f-2184d68fb040\\\",\\\"systemUUID\\\":\\\"539c9d38-f260-4af7-b6c3-f4170bf93c3e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:14Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:14 crc kubenswrapper[4921]: I1210 12:57:14.804027 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:14 crc kubenswrapper[4921]: I1210 12:57:14.804074 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 10 12:57:14 crc kubenswrapper[4921]: I1210 12:57:14.804084 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:14 crc kubenswrapper[4921]: I1210 12:57:14.804102 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:14 crc kubenswrapper[4921]: I1210 12:57:14.804115 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:14Z","lastTransitionTime":"2025-12-10T12:57:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:14 crc kubenswrapper[4921]: E1210 12:57:14.826148 4921 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:14Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:14Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"aa6d129a-c0be-471d-913f-2184d68fb040\\\",\\\"systemUUID\\\":\\\"539c9d38-f260-4af7-b6c3-f4170bf93c3e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:14Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:14 crc kubenswrapper[4921]: I1210 12:57:14.830466 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:14 crc kubenswrapper[4921]: I1210 12:57:14.830525 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 10 12:57:14 crc kubenswrapper[4921]: I1210 12:57:14.830540 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:14 crc kubenswrapper[4921]: I1210 12:57:14.830582 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:14 crc kubenswrapper[4921]: I1210 12:57:14.830596 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:14Z","lastTransitionTime":"2025-12-10T12:57:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:14 crc kubenswrapper[4921]: E1210 12:57:14.845970 4921 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:14Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:14Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"aa6d129a-c0be-471d-913f-2184d68fb040\\\",\\\"systemUUID\\\":\\\"539c9d38-f260-4af7-b6c3-f4170bf93c3e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:14Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:14 crc kubenswrapper[4921]: I1210 12:57:14.849509 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:14 crc kubenswrapper[4921]: I1210 12:57:14.849560 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 10 12:57:14 crc kubenswrapper[4921]: I1210 12:57:14.849573 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:14 crc kubenswrapper[4921]: I1210 12:57:14.849596 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:14 crc kubenswrapper[4921]: I1210 12:57:14.849610 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:14Z","lastTransitionTime":"2025-12-10T12:57:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:14 crc kubenswrapper[4921]: E1210 12:57:14.866570 4921 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:14Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:14Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"aa6d129a-c0be-471d-913f-2184d68fb040\\\",\\\"systemUUID\\\":\\\"539c9d38-f260-4af7-b6c3-f4170bf93c3e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:14Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:14 crc kubenswrapper[4921]: E1210 12:57:14.866723 4921 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 10 12:57:14 crc kubenswrapper[4921]: I1210 12:57:14.868983 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 10 12:57:14 crc kubenswrapper[4921]: I1210 12:57:14.869032 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:14 crc kubenswrapper[4921]: I1210 12:57:14.869051 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:14 crc kubenswrapper[4921]: I1210 12:57:14.869075 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:14 crc kubenswrapper[4921]: I1210 12:57:14.869093 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:14Z","lastTransitionTime":"2025-12-10T12:57:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:14 crc kubenswrapper[4921]: I1210 12:57:14.972906 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:14 crc kubenswrapper[4921]: I1210 12:57:14.972990 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:14 crc kubenswrapper[4921]: I1210 12:57:14.973011 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:14 crc kubenswrapper[4921]: I1210 12:57:14.973038 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:14 crc kubenswrapper[4921]: I1210 12:57:14.973057 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:14Z","lastTransitionTime":"2025-12-10T12:57:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:15 crc kubenswrapper[4921]: I1210 12:57:15.076968 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:15 crc kubenswrapper[4921]: I1210 12:57:15.077053 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:15 crc kubenswrapper[4921]: I1210 12:57:15.077071 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:15 crc kubenswrapper[4921]: I1210 12:57:15.077102 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:15 crc kubenswrapper[4921]: I1210 12:57:15.077123 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:15Z","lastTransitionTime":"2025-12-10T12:57:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:15 crc kubenswrapper[4921]: I1210 12:57:15.180091 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:15 crc kubenswrapper[4921]: I1210 12:57:15.180162 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:15 crc kubenswrapper[4921]: I1210 12:57:15.180204 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:15 crc kubenswrapper[4921]: I1210 12:57:15.180241 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:15 crc kubenswrapper[4921]: I1210 12:57:15.180268 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:15Z","lastTransitionTime":"2025-12-10T12:57:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:15 crc kubenswrapper[4921]: I1210 12:57:15.283507 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:15 crc kubenswrapper[4921]: I1210 12:57:15.283555 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:15 crc kubenswrapper[4921]: I1210 12:57:15.283568 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:15 crc kubenswrapper[4921]: I1210 12:57:15.283589 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:15 crc kubenswrapper[4921]: I1210 12:57:15.283602 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:15Z","lastTransitionTime":"2025-12-10T12:57:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:15 crc kubenswrapper[4921]: I1210 12:57:15.386854 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:15 crc kubenswrapper[4921]: I1210 12:57:15.386903 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:15 crc kubenswrapper[4921]: I1210 12:57:15.386916 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:15 crc kubenswrapper[4921]: I1210 12:57:15.386933 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:15 crc kubenswrapper[4921]: I1210 12:57:15.386944 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:15Z","lastTransitionTime":"2025-12-10T12:57:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:15 crc kubenswrapper[4921]: I1210 12:57:15.464059 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-86bpd" event={"ID":"709b4982-f2e6-4692-ab1a-c1d5b7d507ad","Type":"ContainerStarted","Data":"d90adbff8edcd85eebe4858e412769dff7a05b05bbe7fc533906b55e6ee415e1"} Dec 10 12:57:15 crc kubenswrapper[4921]: I1210 12:57:15.490552 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:15 crc kubenswrapper[4921]: I1210 12:57:15.490597 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:15 crc kubenswrapper[4921]: I1210 12:57:15.490608 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:15 crc kubenswrapper[4921]: I1210 12:57:15.490629 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:15 crc kubenswrapper[4921]: I1210 12:57:15.490641 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:15Z","lastTransitionTime":"2025-12-10T12:57:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:15 crc kubenswrapper[4921]: I1210 12:57:15.513005 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"371fafdc-aa16-4608-aaa2-e419c4ddbc18\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b9a190a657ca03f3fb08626b7af512164ff131b1783b903a02005a111a7036c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\
\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57991b0cb6fd4b37082ff5d4eecc6227d77f241e9a983cd3e0eb9db5b485865f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c24d974446ee70bf587bf3969542cda98f062a9cc78b6af73005d9b8d0a6ee02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5a3f231014293fc0412e577cf9840f62f8db869ea4f0f8bef1bfc5112b38cf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://17a6158acd097054719316d2ad29dc036546d3951bb1e8dd010618f9155270a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://534968b5f5d9
e7b3063c91a3e0b68ba04d83e2cb65ab688b23d284adc6852155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://534968b5f5d9e7b3063c91a3e0b68ba04d83e2cb65ab688b23d284adc6852155\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0acb3ca5fa3945c89412f466b00193354c94ce56dbba608c104d3baf555a2c3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0acb3ca5fa3945c89412f466b00193354c94ce56dbba608c104d3baf555a2c3a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b7d1b714acf0f278cc0310204225d417266a241f1ea827dc625f7b89a7d0ebac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b7d1b714acf0f278cc0310204225d417266a241f1ea827dc625f7b89a7d0ebac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:15Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:15 crc kubenswrapper[4921]: I1210 12:57:15.549469 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"02726135-3050-46a1-a3ab-b2ce46cdb75d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12292f0529bcf32fb33e5accfbd0dfd7d53e377a9ee2046d4ca6efc78fe1c31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a4716beddbcd24e8418830aa5494cffffc21272e45e30bd15cfe58bfc07c543\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f66fe2144cde40619405c04d7d83cbcc2e78503401df428502abad1682d4cb7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4244835c7f038a7c1bf4820de49854350a23fac13c5a252a1553f6508594f10e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:15Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:15 crc kubenswrapper[4921]: I1210 12:57:15.574583 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:15Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:15 crc kubenswrapper[4921]: I1210 12:57:15.593217 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:15 crc kubenswrapper[4921]: I1210 12:57:15.593273 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:15 crc kubenswrapper[4921]: I1210 12:57:15.593287 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:15 crc kubenswrapper[4921]: I1210 12:57:15.593306 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:15 crc kubenswrapper[4921]: I1210 12:57:15.593316 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:15Z","lastTransitionTime":"2025-12-10T12:57:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:15 crc kubenswrapper[4921]: I1210 12:57:15.594337 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://307b845aae3352df08e2f9fd394f4110a37b2a21650593ebb584c5bf37d01397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3be8a498516e12174c8b5612669fd69deef610c01ed9884a5228cd436bbae3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:15Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:15 crc kubenswrapper[4921]: I1210 12:57:15.627446 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-86bpd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"709b4982-f2e6-4692-ab1a-c1d5b7d507ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34d9e720fab0818e4cdf1e2a4da042a5648c7c396fedf17b395ad07ececd5c9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://34d9e720fab0818e4cdf1e2a4da042a5648c7c396fedf17b395ad07ececd5c9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04723fc3840c9d632dae527a5afa04fc7eea858426056da3dfe8e72186198ab1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:
687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://04723fc3840c9d632dae527a5afa04fc7eea858426056da3dfe8e72186198ab1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://adf25ba213f519cad3c21233c0f3d2a383d978543da8ea1db41bb60dd29f9f3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://adf25ba213f519cad3c21233c0f3d2a383d978543da8ea1db41bb60dd29f9f3e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://baa63608618bc4f059414317df70f14a33321d5aed291adc02a9daac92cf5428\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baa63608618bc4f059414317df70f14a33321d5aed291adc02a9daac92cf5428\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mo
untPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e54a218f367591b87841a39399d5889344b8b92fcc70d77105a0191d3dba37c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e54a218f367591b87841a39399d5889344b8b92fcc70d77105a0191d3dba37c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d90adbff8edcd85eebe4858e412769dff7a05b05bbe7fc533906b55e6ee415e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-86bpd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:15Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:15 crc kubenswrapper[4921]: I1210 12:57:15.649412 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f57208b0-80bc-4c1b-bbab-9d2f858972f6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0534394a39803e8a7555e29d0770b5ac7f9197a5f0e03bec4c5460d77fffdd14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6eaca0cb438e61f0856ed7dc64256ccd02aee8dac014d1f5e9cd8aa180c736fb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://692a4c4828dc74b1bfb948f58fab96ee6674030cb9009c72f30f9eae482eb682\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f534d6390920d177e185001b28f7ece42d82a0da922b4aaf174c271dbe975c50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b39874b20cdccc7903753342421a1f7e13b7e99a2cb699a7c0e44226aebd4f4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"message\\\":\\\"et denominator for mutating requests\\\\\\\" limit=200\\\\nI1210 12:57:01.294872 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1210 12:57:01.294893 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1210 12:57:01.294918 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 12:57:01.294926 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 12:57:01.294932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1210 12:57:01.294934 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1210 12:57:01.294938 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1210 12:57:01.294941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1210 12:57:01.301734 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2446357718/tls.crt::/tmp/serving-cert-2446357718/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1765371405\\\\\\\\\\\\\\\" (2025-12-10 12:56:44 +0000 UTC to 2026-01-09 12:56:45 +0000 UTC (now=2025-12-10 12:57:01.30169166 +0000 UTC))\\\\\\\"\\\\nI1210 12:57:01.301889 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1765371416\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1765371416\\\\\\\\\\\\\\\" (2025-12-10 11:56:55 +0000 UTC to 2026-12-10 11:56:55 +0000 UTC (now=2025-12-10 12:57:01.301865574 +0000 UTC))\\\\\\\"\\\\nI1210 12:57:01.301907 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1210 12:57:01.301934 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nF1210 12:57:01.302850 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e5afbcb1ea81c3f9ec4152ef614a3f07ba1ded75c774c467e968f9c3ee72e33\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bee74fc4c681cc10c5a460c807659272e393e19173109e82ef65371c5b363ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bee74fc4c681cc10c5a460c807659272e393e19173109e82ef65371c5b363ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:15Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:15 crc kubenswrapper[4921]: I1210 12:57:15.664195 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:15Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:15 crc kubenswrapper[4921]: I1210 12:57:15.679226 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:15Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:15 crc kubenswrapper[4921]: I1210 12:57:15.695859 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"354355f7-6630-49a8-bdc5-5e875feecb7f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22c45fd7d4d0bb91e995e76a0d813660f9b488a4765e3a21eab2485e1ff03ff3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dbm9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27975eaa70887a1e6ec3bc21ce170bbe5dfe5a05172264be8c8bd343aea02998\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dbm9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-vn2n6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:15Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:15 crc kubenswrapper[4921]: I1210 12:57:15.696722 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:15 crc kubenswrapper[4921]: I1210 12:57:15.696779 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:15 crc kubenswrapper[4921]: I1210 12:57:15.696796 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:15 crc kubenswrapper[4921]: I1210 12:57:15.696817 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:15 crc kubenswrapper[4921]: I1210 12:57:15.696831 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:15Z","lastTransitionTime":"2025-12-10T12:57:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:15 crc kubenswrapper[4921]: I1210 12:57:15.714059 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bd245e67c99943297f64701eba8772143dc206caf67849eaf2f9a8e82dab0d26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:15Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:15 crc kubenswrapper[4921]: I1210 12:57:15.728159 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jskgz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ad003cc-9fcc-4fc6-86b9-247b30013c0a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8c0dc3ea5672198c430f12ce59b7f2a66100fe52e0f7b4552deba97144250d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m875h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:11Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jskgz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:15Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:15 crc kubenswrapper[4921]: I1210 12:57:15.745161 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2789e9cd1bca4abecf0939aad4a5f63bdc250a525ad3664bc2440e8b0b7a834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:15Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:15 crc kubenswrapper[4921]: I1210 12:57:15.758967 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zmks6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f2626c5-78df-45d2-8970-c4f99790a0fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d43ebe41a779225842dfa1c4d3be01575113b67ada9be07f553df1514e9dcf85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ft9kj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zmks6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:15Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:15 crc kubenswrapper[4921]: I1210 12:57:15.772603 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pqlx4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://480da3b2621712c4562f9423dc98fdbf17a9dc45365f129777611bc7e934c709\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lhs2m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pqlx4\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:15Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:15 crc kubenswrapper[4921]: I1210 12:57:15.797475 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50684108-04fc-405c-82be-d21d16cd650b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release
-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\"
,\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\
\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34932b230bb26e6c4b1bdf433827ce608df8658f6fb76140a4f0ac680dc1d43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://34932b230bb26e6c4b1bdf433827ce608df8658f6fb76140a4f0ac680dc1d43f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-m7n89\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:15Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:15 crc kubenswrapper[4921]: I1210 12:57:15.798970 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:15 crc kubenswrapper[4921]: I1210 12:57:15.798998 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:15 crc kubenswrapper[4921]: I1210 12:57:15.799008 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:15 crc kubenswrapper[4921]: I1210 12:57:15.799024 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:15 crc kubenswrapper[4921]: I1210 12:57:15.799034 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:15Z","lastTransitionTime":"2025-12-10T12:57:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:15 crc kubenswrapper[4921]: I1210 12:57:15.901927 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:15 crc kubenswrapper[4921]: I1210 12:57:15.901973 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:15 crc kubenswrapper[4921]: I1210 12:57:15.901991 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:15 crc kubenswrapper[4921]: I1210 12:57:15.902009 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:15 crc kubenswrapper[4921]: I1210 12:57:15.902021 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:15Z","lastTransitionTime":"2025-12-10T12:57:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:16 crc kubenswrapper[4921]: I1210 12:57:16.004583 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:16 crc kubenswrapper[4921]: I1210 12:57:16.004626 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:16 crc kubenswrapper[4921]: I1210 12:57:16.004636 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:16 crc kubenswrapper[4921]: I1210 12:57:16.004652 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:16 crc kubenswrapper[4921]: I1210 12:57:16.004664 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:16Z","lastTransitionTime":"2025-12-10T12:57:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:16 crc kubenswrapper[4921]: I1210 12:57:16.107225 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:16 crc kubenswrapper[4921]: I1210 12:57:16.107261 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:16 crc kubenswrapper[4921]: I1210 12:57:16.107272 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:16 crc kubenswrapper[4921]: I1210 12:57:16.107289 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:16 crc kubenswrapper[4921]: I1210 12:57:16.107299 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:16Z","lastTransitionTime":"2025-12-10T12:57:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:16 crc kubenswrapper[4921]: I1210 12:57:16.192515 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 12:57:16 crc kubenswrapper[4921]: I1210 12:57:16.192644 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 12:57:16 crc kubenswrapper[4921]: E1210 12:57:16.192701 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 12:57:16 crc kubenswrapper[4921]: I1210 12:57:16.192814 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 12:57:16 crc kubenswrapper[4921]: E1210 12:57:16.192944 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 12:57:16 crc kubenswrapper[4921]: E1210 12:57:16.193179 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 12:57:16 crc kubenswrapper[4921]: I1210 12:57:16.210195 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:16 crc kubenswrapper[4921]: I1210 12:57:16.210242 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:16 crc kubenswrapper[4921]: I1210 12:57:16.210267 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:16 crc kubenswrapper[4921]: I1210 12:57:16.210293 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:16 crc kubenswrapper[4921]: I1210 12:57:16.210313 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:16Z","lastTransitionTime":"2025-12-10T12:57:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:16 crc kubenswrapper[4921]: I1210 12:57:16.314010 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:16 crc kubenswrapper[4921]: I1210 12:57:16.314098 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:16 crc kubenswrapper[4921]: I1210 12:57:16.314128 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:16 crc kubenswrapper[4921]: I1210 12:57:16.314161 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:16 crc kubenswrapper[4921]: I1210 12:57:16.314187 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:16Z","lastTransitionTime":"2025-12-10T12:57:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:16 crc kubenswrapper[4921]: I1210 12:57:16.418876 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:16 crc kubenswrapper[4921]: I1210 12:57:16.418979 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:16 crc kubenswrapper[4921]: I1210 12:57:16.418999 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:16 crc kubenswrapper[4921]: I1210 12:57:16.419024 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:16 crc kubenswrapper[4921]: I1210 12:57:16.419075 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:16Z","lastTransitionTime":"2025-12-10T12:57:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:16 crc kubenswrapper[4921]: I1210 12:57:16.474420 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" event={"ID":"50684108-04fc-405c-82be-d21d16cd650b","Type":"ContainerStarted","Data":"bf05f49208bee2d3d43564c1f88b49d4481b48bbb9afe2108c1eea92e4531305"} Dec 10 12:57:16 crc kubenswrapper[4921]: I1210 12:57:16.475199 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" Dec 10 12:57:16 crc kubenswrapper[4921]: I1210 12:57:16.480896 4921 generic.go:334] "Generic (PLEG): container finished" podID="709b4982-f2e6-4692-ab1a-c1d5b7d507ad" containerID="d90adbff8edcd85eebe4858e412769dff7a05b05bbe7fc533906b55e6ee415e1" exitCode=0 Dec 10 12:57:16 crc kubenswrapper[4921]: I1210 12:57:16.480950 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-86bpd" event={"ID":"709b4982-f2e6-4692-ab1a-c1d5b7d507ad","Type":"ContainerDied","Data":"d90adbff8edcd85eebe4858e412769dff7a05b05bbe7fc533906b55e6ee415e1"} Dec 10 12:57:16 crc kubenswrapper[4921]: I1210 12:57:16.506055 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" Dec 10 12:57:16 crc kubenswrapper[4921]: I1210 12:57:16.506549 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"371fafdc-aa16-4608-aaa2-e419c4ddbc18\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b9a190a657ca03f3fb08626b7af512164ff131b1783b903a02005a111a7036c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57991b0cb6fd4b37082ff5d4eecc6227d77f241e9a983cd3e0eb9db5b485865f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b5
4b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c24d974446ee70bf587bf3969542cda98f062a9cc78b6af73005d9b8d0a6ee02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5a3f231014293fc0412e577cf9840f62f8db869ea4f0f8bef1bfc5112b38cf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://17a6158acd097054719316d2ad29dc036546d3951bb1e8dd010618f9155270a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://534968b5f5d9e7b3063c91a3e0b68ba04d83e2cb65ab688b23d284adc6852155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-releas
e-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://534968b5f5d9e7b3063c91a3e0b68ba04d83e2cb65ab688b23d284adc6852155\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0acb3ca5fa3945c89412f466b00193354c94ce56dbba608c104d3baf555a2c3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0acb3ca5fa3945c89412f466b00193354c94ce56dbba608c104d3baf555a2c3a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b7d1b714acf0f278cc0310204225d417266a241f1ea827dc625f7b89a7d0ebac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b7d1b714acf0f278cc0310204225d417266a241f1ea827dc625f7b89a7d0ebac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:16Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:16 crc kubenswrapper[4921]: I1210 12:57:16.522063 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:16 crc kubenswrapper[4921]: I1210 12:57:16.522106 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:16 crc kubenswrapper[4921]: I1210 12:57:16.522120 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:16 crc 
kubenswrapper[4921]: I1210 12:57:16.522150 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:16 crc kubenswrapper[4921]: I1210 12:57:16.522174 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:16Z","lastTransitionTime":"2025-12-10T12:57:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:16 crc kubenswrapper[4921]: I1210 12:57:16.529683 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02726135-3050-46a1-a3ab-b2ce46cdb75d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12292f0529bcf32fb33e5accfbd0dfd7d53e377a9ee2046d4ca6efc78fe1c31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a4716beddbcd24e8418830aa5494cffffc21272e45e30bd15cfe58bfc07c543\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f66fe2144cde40619405c04d7d83cbcc2e78503401df428502abad1682d4cb7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4244835c7f038a7c1bf4820de49854350a23fac13c5a252a1553f6508594f10e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:16Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:16 crc kubenswrapper[4921]: I1210 12:57:16.546141 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:16Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:16 crc kubenswrapper[4921]: I1210 12:57:16.562787 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://307b845aae3352df08e2f9fd394f4110a37b2a21650593ebb584c5bf37d01397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3be8a498516e12174c8b5612669fd69deef610c01ed9884a5228cd436bbae3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imag
eID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:16Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:16 crc kubenswrapper[4921]: I1210 12:57:16.579843 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-86bpd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"709b4982-f2e6-4692-ab1a-c1d5b7d507ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34d9e720fab0818e4cdf1e2a4da042a5648c7c396fedf17b395ad07ececd5c9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://34d9e720fab0818e4cdf1e2a4da042a5648c7c396fedf17b395ad07ececd5c9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04723fc3840c9d632dae527a5afa04fc7eea858426056da3dfe8e72186198ab1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://04723fc3840c9d632dae527a5afa04fc7eea858426056da3dfe8e72186198ab1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://adf25ba213f519cad3c21233c0f3d2a383d978543da8ea1db41bb60dd29f9f3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://adf25ba213f519cad3c21233c0f3d2a383d978543da8ea1db41bb60dd29f9f3e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://baa63608618bc4f059414317df70f14a33321d5aed291adc02a9daac92cf5428\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baa63608618bc4f059414317df70f14a33321d5aed291adc02a9daac92cf5428\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e54a218f367591b87841a39399d5889344b8b92fcc70d77105a0191d3dba37c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e54a218f367591b87841a39399d5889344b8b92fcc70d77105a0191d3dba37c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d90adbff8edcd85eebe4858e412769dff7a05b05bbe7fc533906b55e6ee415e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-86bpd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:16Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:16 crc kubenswrapper[4921]: I1210 12:57:16.600252 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:16Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:16 crc kubenswrapper[4921]: I1210 12:57:16.614251 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:16Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:16 crc kubenswrapper[4921]: I1210 12:57:16.625267 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:16 crc kubenswrapper[4921]: I1210 12:57:16.625307 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:16 crc kubenswrapper[4921]: I1210 12:57:16.625323 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:16 crc kubenswrapper[4921]: I1210 12:57:16.625345 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:16 crc kubenswrapper[4921]: I1210 12:57:16.625360 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:16Z","lastTransitionTime":"2025-12-10T12:57:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:16 crc kubenswrapper[4921]: I1210 12:57:16.626789 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"354355f7-6630-49a8-bdc5-5e875feecb7f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22c45fd7d4d0bb91e995e76a0d813660f9b488a4765e3a21eab2485e1ff03ff3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dbm9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27975eaa70887a1e6ec3bc21ce170bbe5dfe5a05172264be8c8bd343aea02998\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dbm9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-vn2n6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:16Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:16 crc kubenswrapper[4921]: I1210 12:57:16.644210 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f57208b0-80bc-4c1b-bbab-9d2f858972f6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0534394a39803e8a7555e29d0770b5ac7f9197a5f0e03bec4c5460d77fffdd14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6eaca0cb438e61f0856ed7dc64256ccd02aee8dac014d1f5e9cd8aa180c736fb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://692a4c4828dc74b1bfb948f58fab96ee6674030cb9009c72f30f9eae482eb682\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e2
7753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f534d6390920d177e185001b28f7ece42d82a0da922b4aaf174c271dbe975c50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b39874b20cdccc7903753342421a1f7e13b7e99a2cb699a7c0e44226aebd4f4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"message\\\":\\\"et denominator for mutating requests\\\\\\\" limit=200\\\\nI1210 12:57:01.294872 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1210 12:57:01.294893 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1210 12:57:01.294918 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 12:57:01.294926 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 12:57:01.294932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1210 12:57:01.294934 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1210 12:57:01.294938 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1210 12:57:01.294941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1210 12:57:01.301734 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2446357718/tls.crt::/tmp/serving-cert-2446357718/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1765371405\\\\\\\\\\\\\\\" (2025-12-10 12:56:44 +0000 UTC to 2026-01-09 12:56:45 +0000 UTC (now=2025-12-10 12:57:01.30169166 +0000 UTC))\\\\\\\"\\\\nI1210 12:57:01.301889 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1765371416\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1765371416\\\\\\\\\\\\\\\" (2025-12-10 11:56:55 +0000 UTC to 2026-12-10 11:56:55 +0000 UTC (now=2025-12-10 12:57:01.301865574 +0000 UTC))\\\\\\\"\\\\nI1210 12:57:01.301907 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1210 12:57:01.301934 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nF1210 12:57:01.302850 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e5afbcb1ea81c3f9ec4152ef614a3f07ba1ded75c774c467e968f9c3ee72e33\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bee74fc4c681cc10c5a460c807659272e393e19173109e82ef65371c5b363ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bee74fc4c681cc10c5a460c807659272e393e19173109e82ef65371c5b363ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:16Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:16 crc kubenswrapper[4921]: I1210 12:57:16.661685 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jskgz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ad003cc-9fcc-4fc6-86b9-247b30013c0a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8c0dc3ea5672198c430f12ce59b7f2a66100fe52e0f7b4552deba97144250d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m875h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:11Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jskgz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:16Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:16 crc kubenswrapper[4921]: I1210 12:57:16.674838 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bd245e67c99943297f64701eba8772143dc206caf67849eaf2f9a8e82dab0d26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:16Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:16 crc kubenswrapper[4921]: I1210 12:57:16.689730 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2789e9cd1bca4abecf0939aad4a5f63bdc250a525ad3664bc2440e8b0b7a834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:16Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:16 crc kubenswrapper[4921]: I1210 12:57:16.704438 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zmks6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f2626c5-78df-45d2-8970-c4f99790a0fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d43ebe41a779225842dfa1c4d3be01575113b67ada9be07f553df1514e9dcf85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ft9kj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zmks6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:16Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:16 crc kubenswrapper[4921]: I1210 12:57:16.719851 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pqlx4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://480da3b2621712c4562f9423dc98fdbf17a9dc45365f129777611bc7e934c709\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lhs2m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pqlx4\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:16Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:16 crc kubenswrapper[4921]: I1210 12:57:16.728058 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:16 crc kubenswrapper[4921]: I1210 12:57:16.728116 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:16 crc kubenswrapper[4921]: I1210 12:57:16.728135 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:16 crc kubenswrapper[4921]: I1210 12:57:16.728159 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:16 crc kubenswrapper[4921]: I1210 12:57:16.728176 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:16Z","lastTransitionTime":"2025-12-10T12:57:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:16 crc kubenswrapper[4921]: I1210 12:57:16.741864 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50684108-04fc-405c-82be-d21d16cd650b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8fd269a96475df9dccf2f7bd0ffae831f397f49232f5c22df67903b9b8b8161e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a88b1b9101bc4ab339d394df337e4e11ec8af98b44b621bcb84eed1a0fba3d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://933c0c81aa0aa2d676a6e404f883a7c81240ef7b07a2e794878c85994d0eb88f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f8f888f214898cb28563da7a77267781622df1f2231c27d1fbdee617ada1ec2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27956424405bdf6223a96b8fd91b5152276a1501c3de2e07dfafc8b3329a6063\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59a0f3962237d723e5aa9044de1ddce3673ae1fb4c9e5e0478cd41daa661f6d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf05f49208bee2d3d43564c1f88b49d4481b48bb
b9afe2108c1eea92e4531305\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd0025f5be6e68aba73c349dd732281dead920b7d8c2d307b4a67cfdafb99119\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccou
nt\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34932b230bb26e6c4b1bdf433827ce608df8658f6fb76140a4f0ac680dc1d43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://34932b230bb26e6c4b1bdf433827ce608df8658f6fb76140a4f0ac680dc1d43f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-m7n89\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:16Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:16 crc kubenswrapper[4921]: I1210 12:57:16.771630 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f57208b0-80bc-4c1b-bbab-9d2f858972f6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0534394a39803e8a7555e29d0770b5ac7f9197a5f0e03bec4c5460d77fffdd14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6eaca0cb438e61f0856ed7dc64256ccd02aee8dac014d1f5e9cd8aa180c736fb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://692a4c4828dc74b1bfb948f58fab96ee6674030cb9009c72f30f9eae482eb682\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f534d6390920d177e185001b28f7ece42d82a0da922b4aaf174c271dbe975c50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b39874b20cdccc7903753342421a1f7e13b7e99a2cb699a7c0e44226aebd4f4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"message\\\":\\\"et denominator for mutating requests\\\\\\\" limit=200\\\\nI1210 12:57:01.294872 1 
secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1210 12:57:01.294893 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1210 12:57:01.294918 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 12:57:01.294926 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 12:57:01.294932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1210 12:57:01.294934 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1210 12:57:01.294938 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1210 12:57:01.294941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1210 12:57:01.301734 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2446357718/tls.crt::/tmp/serving-cert-2446357718/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1765371405\\\\\\\\\\\\\\\" (2025-12-10 12:56:44 +0000 UTC to 2026-01-09 12:56:45 +0000 UTC (now=2025-12-10 12:57:01.30169166 +0000 UTC))\\\\\\\"\\\\nI1210 12:57:01.301889 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1765371416\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1765371416\\\\\\\\\\\\\\\" (2025-12-10 11:56:55 +0000 UTC to 2026-12-10 11:56:55 +0000 UTC (now=2025-12-10 12:57:01.301865574 +0000 UTC))\\\\\\\"\\\\nI1210 12:57:01.301907 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1210 12:57:01.301934 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nF1210 12:57:01.302850 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e5afbcb1ea81c3f9ec4152ef614a3f07ba1ded75c774c467e968f9c3ee72e33\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bee74fc4c681cc10c5a460c807659272e393e19173109e82ef65371c5b363ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bee74fc4c681cc10c5a460c807659272e393e19173109e82ef65371c5b363ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:16Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:16 crc kubenswrapper[4921]: I1210 12:57:16.786564 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:16Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:16 crc kubenswrapper[4921]: I1210 12:57:16.799657 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:16Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:16 crc kubenswrapper[4921]: I1210 12:57:16.801714 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 10 12:57:16 crc kubenswrapper[4921]: I1210 12:57:16.811690 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"354355f7-6630-49a8-bdc5-5e875feecb7f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22c45fd7d4d0bb91e995e76a0d813660f9b488a4765e3a21eab2485e1ff03ff3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dbm9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27975eaa70887a1e6ec3bc21ce170bbe5dfe5a05172264be8c8bd343aea02998\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp
-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dbm9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-vn2n6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:16Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:16 crc kubenswrapper[4921]: I1210 12:57:16.823191 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bd245e67c99943297f64701eba8772143dc206caf67849eaf2f9a8e82dab0d26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:16Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:16 crc kubenswrapper[4921]: I1210 12:57:16.831348 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:16 crc kubenswrapper[4921]: I1210 12:57:16.831414 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:16 crc kubenswrapper[4921]: I1210 12:57:16.831425 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:16 crc kubenswrapper[4921]: I1210 12:57:16.831439 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:16 crc kubenswrapper[4921]: I1210 12:57:16.831449 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:16Z","lastTransitionTime":"2025-12-10T12:57:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:16 crc kubenswrapper[4921]: I1210 12:57:16.833086 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jskgz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ad003cc-9fcc-4fc6-86b9-247b30013c0a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8c0dc3ea5672198c430f12ce59b7f2a66100fe52e0f7b4552deba97144250d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m875h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\
",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:11Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jskgz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:16Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:16 crc kubenswrapper[4921]: I1210 12:57:16.845539 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2789e9cd1bca4abecf0939aad4a5f63bdc250a525ad3664bc2440e8b0b7a834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:16Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:16 crc kubenswrapper[4921]: I1210 12:57:16.855233 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zmks6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f2626c5-78df-45d2-8970-c4f99790a0fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d43ebe41a779225842dfa1c4d3be01575113b67ada9be07f553df1514e9dcf85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ft9kj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zmks6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:16Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:16 crc kubenswrapper[4921]: I1210 12:57:16.866485 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pqlx4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://480da3b2621712c4562f9423dc98fdbf17a9dc45365f129777611bc7e934c709\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lhs2m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pqlx4\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:16Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:16 crc kubenswrapper[4921]: I1210 12:57:16.885321 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50684108-04fc-405c-82be-d21d16cd650b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8fd269a96475df9dccf2f7bd0ffae831f397f49232f5c22df67903b9b8b8161e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a88b1b9101bc4ab339d394df337e4e11ec8af98b44b621bcb84eed1a0fba3d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"re
cursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://933c0c81aa0aa2d676a6e404f883a7c81240ef7b07a2e794878c85994d0eb88f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f8f888f214898cb28563da7a77267781622df1f2231c27d1fbdee617ada1ec2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27956424405bdf6223a96b8fd91b5152276a1501c3de2e07dfafc8b3329a6063\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cr
i-o://59a0f3962237d723e5aa9044de1ddce3673ae1fb4c9e5e0478cd41daa661f6d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf05f49208bee2d3d43564c1f88b49d4481b48bbb9afe2108c1eea92e4531305\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"
mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd0025f5be6e68aba73c349dd732281dead920b7d8c2d307b4a67cfdafb99119\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34932b230bb26e6c4b1bdf433827ce608df8658f6fb76140a4f0ac680dc1d43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://34932b230bb26e6c4b1bdf433827ce608df8658f6fb76140a4f0ac680dc1d43f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-m7n89\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:16Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:16 crc kubenswrapper[4921]: I1210 12:57:16.909044 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed 
to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"371fafdc-aa16-4608-aaa2-e419c4ddbc18\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b9a190a657ca03f3fb08626b7af512164ff131b1783b903a02005a111a7036c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57991b0cb6fd4b37082ff5d4eecc6227d77f241e9a983cd3e0eb9db5b485865f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c24d974446ee70bf587bf3969542cda98f062a9cc78b6af73005d9b8d0a6ee02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5a3f231014293fc0412e57
7cf9840f62f8db869ea4f0f8bef1bfc5112b38cf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://17a6158acd097054719316d2ad29dc036546d3951bb1e8dd010618f9155270a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://534968b5f5d9e7b3063c91a3e0b68ba04d83e2cb65ab688b23d284adc6852155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://534968b5f5d9e7b3063c91a3e0b68ba04d83e2cb65ab688b23d284adc6852155\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0acb3ca5fa3945c89412f466b00193354c94ce56dbba608c104d3baf555a2c3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0acb3ca5fa3945c89412f466b00193354c94ce56dbba608c104d3baf555a2c3a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b7d1b714acf0f278cc0310204225d417266a241f1ea827dc625f
7b89a7d0ebac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b7d1b714acf0f278cc0310204225d417266a241f1ea827dc625f7b89a7d0ebac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:16Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:16 crc kubenswrapper[4921]: I1210 12:57:16.925695 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02726135-3050-46a1-a3ab-b2ce46cdb75d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12292f0529bcf32fb33e5accfbd0dfd7d53e377a9ee2046d4ca6efc78fe1c31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a4716beddbcd24e8418830aa5494cffffc21272e45e30bd15cfe58bfc07c543\\\",\
\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f66fe2144cde40619405c04d7d83cbcc2e78503401df428502abad1682d4cb7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4244835c7f038a7c1bf4820de49854350a23fac13c5a252a1553f6508594f10e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:16Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:16 crc kubenswrapper[4921]: I1210 12:57:16.934576 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:16 crc kubenswrapper[4921]: I1210 12:57:16.934617 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:16 crc kubenswrapper[4921]: I1210 12:57:16.934633 4921 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:16 crc kubenswrapper[4921]: I1210 12:57:16.934655 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:16 crc kubenswrapper[4921]: I1210 12:57:16.934673 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:16Z","lastTransitionTime":"2025-12-10T12:57:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:16 crc kubenswrapper[4921]: I1210 12:57:16.939689 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:16Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:16 crc kubenswrapper[4921]: I1210 12:57:16.955892 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://307b845aae3352df08e2f9fd394f4110a37b2a21650593ebb584c5bf37d01397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3be8a498516e12174c8b5612669fd69deef610c01ed9884a5228cd436bbae3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:16Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:16 crc kubenswrapper[4921]: I1210 12:57:16.972174 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-86bpd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"709b4982-f2e6-4692-ab1a-c1d5b7d507ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34d9e720fab0818e4cdf1e2a4da042a5648c7c396fedf17b395ad07ececd5c9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://34d9e720fab0818e4cdf1e2a4da042a5648c7c396fedf17b395ad07ececd5c9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04723fc3840c9d632dae527a5afa04fc7eea858426056da3dfe8e72186198ab1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://04723fc3840c9d632dae527a5afa04fc7eea858426056da3dfe8e72186198ab1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://adf25ba213f519cad3c21233c0f3d2a383d978543da8ea1db41bb60dd29f9f3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://adf25ba213f519cad3c21233c0f3d2a383d978543da8ea1db41bb60dd29f9f3e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://baa63608618bc4f059414317df70f14a33321d5aed291adc02a9daac92cf5428\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baa63608618bc4f059414317df70f14a33321d5aed291adc02a9daac92cf5428\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e54a218f367591b87841a39399d5889344b8b92fcc70d77105a0191d3dba37c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e54a218f367591b87841a39399d5889344b8b92fcc70d77105a0191d3dba37c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d90adbff8edcd85eebe4858e412769dff7a05b05bbe7fc533906b55e6ee415e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d90adbff8edcd85eebe4858e412769dff7a05b05bbe7fc533906b55e6ee415e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-86bpd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:16Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:17 crc kubenswrapper[4921]: I1210 12:57:17.000046 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"371fafdc-aa16-4608-aaa2-e419c4ddbc18\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b9a190a657ca03f3fb08626b7af512164ff131b1783b903a02005a111a7036c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57991b0cb6fd4b37082ff5d4eecc6227d77f241e9a983cd3e0eb9db5b485865f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c24d974446ee70bf587bf3969542cda98f062a9cc78b6af73005d9b8d0a6ee02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5a3f231014293fc0412e577cf9840f62f8db86
9ea4f0f8bef1bfc5112b38cf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://17a6158acd097054719316d2ad29dc036546d3951bb1e8dd010618f9155270a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://534968b5f5d9e7b3063c91a3e0b68ba04d83e2cb65ab688b23d284adc6852155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://534968b5f5d9e7b3063c91a3e0b68ba04d83e2cb65ab688b23d284adc6852155\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0acb3ca5fa3945c89412f466b00193354c94ce56dbba608c104d3baf555a2c3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0acb3ca5fa3945c89412f466b00193354c94ce56dbba608c104d3baf555a2c3a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b7d1b714acf0f278cc0310204225d417266a241f1ea827dc625f7b89a7d0ebac\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b7d1b714acf0f278cc0310204225d417266a241f1ea827dc625f7b89a7d0ebac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:16Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:17 crc kubenswrapper[4921]: I1210 12:57:17.013509 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02726135-3050-46a1-a3ab-b2ce46cdb75d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12292f0529bcf32fb33e5accfbd0dfd7d53e377a9ee2046d4ca6efc78fe1c31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a4716beddbcd24e8418830aa5494cffffc21272e45e30bd15cfe58bfc07c543\\\",\\\"image\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f66fe2144cde40619405c04d7d83cbcc2e78503401df428502abad1682d4cb7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4244835c7f038a7c1bf4820de49854350a23fac13c5a252a1553f6508594f10e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:17Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:17 crc kubenswrapper[4921]: I1210 12:57:17.026640 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:17Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:17 crc kubenswrapper[4921]: I1210 12:57:17.037117 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:17 crc kubenswrapper[4921]: I1210 12:57:17.037181 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:17 crc kubenswrapper[4921]: I1210 12:57:17.037194 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:17 crc kubenswrapper[4921]: I1210 12:57:17.037213 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:17 crc kubenswrapper[4921]: I1210 12:57:17.037228 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:17Z","lastTransitionTime":"2025-12-10T12:57:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:17 crc kubenswrapper[4921]: I1210 12:57:17.040903 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://307b845aae3352df08e2f9fd394f4110a37b2a21650593ebb584c5bf37d01397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3be8a498516e12174c8b5612669fd69deef610c01ed9884a5228cd436bbae3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:17Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:17 crc kubenswrapper[4921]: I1210 12:57:17.055884 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-86bpd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"709b4982-f2e6-4692-ab1a-c1d5b7d507ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34d9e720fab0818e4cdf1e2a4da042a5648c7c396fedf17b395ad07ececd5c9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://34d9e720fab0818e4cdf1e2a4da042a5648c7c396fedf17b395ad07ececd5c9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04723fc3840c9d632dae527a5afa04fc7eea858426056da3dfe8e72186198ab1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://04723fc3840c9d632dae527a5afa04fc7eea858426056da3dfe8e72186198ab1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://adf25ba213f519cad3c21233c0f3d2a383d978543da8ea1db41bb60dd29f9f3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://adf25ba213f519cad3c21233c0f3d2a383d978543da8ea1db41bb60dd29f9f3e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://baa63608618bc4f059414317df70f14a33321d5aed291adc02a9daac92cf5428\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baa63608618bc4f059414317df70f14a33321d5aed291adc02a9daac92cf5428\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"D
isabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e54a218f367591b87841a39399d5889344b8b92fcc70d77105a0191d3dba37c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e54a218f367591b87841a39399d5889344b8b92fcc70d77105a0191d3dba37c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d90adbff8edcd85eebe4858e412769dff7a05b05bbe7fc533906b55e6ee415e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d90adbff8edcd85eebe4858e412769dff7a05b05bbe7fc533906b55e6ee415e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-86bpd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:17Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:17 crc kubenswrapper[4921]: I1210 12:57:17.070990 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f57208b0-80bc-4c1b-bbab-9d2f858972f6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0534394a39803e8a7555e29d0770b5ac7f9197a5f0e03bec4c5460d77fffdd14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6eaca0cb438e61f0856ed7dc64256ccd02aee8dac014d1f5e9cd8aa180c736fb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://692a4c4828dc74b1bfb948f58fab96ee6674030cb9009c72f30f9eae482eb682\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f534d6390920d177e185001b28f7ece42d82a0da922b4aaf174c271dbe975c50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b39874b20cdccc7903753342421a1f7e13b7e99a2cb699a7c0e44226aebd4f4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"message\\\":\\\"et denominator for mutating requests\\\\\\\" limit=200\\\\nI1210 12:57:01.294872 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1210 12:57:01.294893 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1210 12:57:01.294918 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 12:57:01.294926 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 12:57:01.294932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1210 12:57:01.294934 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1210 12:57:01.294938 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1210 12:57:01.294941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1210 12:57:01.301734 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2446357718/tls.crt::/tmp/serving-cert-2446357718/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1765371405\\\\\\\\\\\\\\\" (2025-12-10 12:56:44 +0000 UTC to 2026-01-09 12:56:45 +0000 UTC (now=2025-12-10 12:57:01.30169166 +0000 UTC))\\\\\\\"\\\\nI1210 12:57:01.301889 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1765371416\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1765371416\\\\\\\\\\\\\\\" (2025-12-10 11:56:55 +0000 UTC to 2026-12-10 11:56:55 +0000 UTC (now=2025-12-10 12:57:01.301865574 +0000 UTC))\\\\\\\"\\\\nI1210 12:57:01.301907 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1210 12:57:01.301934 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nF1210 12:57:01.302850 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e5afbcb1ea81c3f9ec4152ef614a3f07ba1ded75c774c467e968f9c3ee72e33\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bee74fc4c681cc10c5a460c807659272e393e19173109e82ef65371c5b363ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bee74fc4c681cc10c5a460c807659272e393e19173109e82ef65371c5b363ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:17Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:17 crc kubenswrapper[4921]: I1210 12:57:17.086046 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:17Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:17 crc kubenswrapper[4921]: I1210 12:57:17.101129 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:17Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:17 crc kubenswrapper[4921]: I1210 12:57:17.115173 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"354355f7-6630-49a8-bdc5-5e875feecb7f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22c45fd7d4d0bb91e995e76a0d813660f9b488a4765e3a21eab2485e1ff03ff3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dbm9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27975eaa70887a1e6ec3bc21ce170bbe5dfe5a05172264be8c8bd343aea02998\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dbm9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-vn2n6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:17Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:17 crc kubenswrapper[4921]: I1210 12:57:17.128100 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bd245e67c99943297f64701eba8772143dc206caf67849eaf2f9a8e82dab0d26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:17Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:17 crc kubenswrapper[4921]: I1210 12:57:17.139647 4921 
status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jskgz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ad003cc-9fcc-4fc6-86b9-247b30013c0a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8c0dc3ea5672198c430f12ce59b7f2a66100fe52e0f7b4552deba97144250d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m875h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:11Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jskgz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:17Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:17 crc kubenswrapper[4921]: I1210 12:57:17.144590 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:17 crc kubenswrapper[4921]: I1210 12:57:17.144718 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:17 crc kubenswrapper[4921]: I1210 12:57:17.144844 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:17 crc kubenswrapper[4921]: I1210 12:57:17.144918 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:17 crc kubenswrapper[4921]: I1210 12:57:17.144994 4921 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:17Z","lastTransitionTime":"2025-12-10T12:57:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:17 crc kubenswrapper[4921]: I1210 12:57:17.164999 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2789e9cd1bca4abecf0939aad4a5f63bdc250a525ad3664bc2440e8b0b7a834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:17Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:17 crc kubenswrapper[4921]: I1210 12:57:17.178363 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zmks6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f2626c5-78df-45d2-8970-c4f99790a0fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d43ebe41a779225842dfa1c4d3be01575113b67ada9be07f553df1514e9dcf85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ft9kj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zmks6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:17Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:17 crc kubenswrapper[4921]: I1210 12:57:17.196044 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pqlx4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://480da3b2621712c4562f9423dc98fdbf17a9dc45365f129777611bc7e934c709\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lhs2m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pqlx4\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:17Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:17 crc kubenswrapper[4921]: I1210 12:57:17.220527 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50684108-04fc-405c-82be-d21d16cd650b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8fd269a96475df9dccf2f7bd0ffae831f397f49232f5c22df67903b9b8b8161e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a88b1b9101bc4ab339d394df337e4e11ec8af98b44b621bcb84eed1a0fba3d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"re
cursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://933c0c81aa0aa2d676a6e404f883a7c81240ef7b07a2e794878c85994d0eb88f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f8f888f214898cb28563da7a77267781622df1f2231c27d1fbdee617ada1ec2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27956424405bdf6223a96b8fd91b5152276a1501c3de2e07dfafc8b3329a6063\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cr
i-o://59a0f3962237d723e5aa9044de1ddce3673ae1fb4c9e5e0478cd41daa661f6d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf05f49208bee2d3d43564c1f88b49d4481b48bbb9afe2108c1eea92e4531305\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"
mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd0025f5be6e68aba73c349dd732281dead920b7d8c2d307b4a67cfdafb99119\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34932b230bb26e6c4b1bdf433827ce608df8658f6fb76140a4f0ac680dc1d43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://34932b230bb26e6c4b1bdf433827ce608df8658f6fb76140a4f0ac680dc1d43f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-m7n89\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:17Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:17 crc kubenswrapper[4921]: I1210 12:57:17.247876 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 10 12:57:17 crc kubenswrapper[4921]: I1210 12:57:17.247923 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:17 crc kubenswrapper[4921]: I1210 12:57:17.247935 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:17 crc kubenswrapper[4921]: I1210 12:57:17.247952 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:17 crc kubenswrapper[4921]: I1210 12:57:17.247962 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:17Z","lastTransitionTime":"2025-12-10T12:57:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:17 crc kubenswrapper[4921]: I1210 12:57:17.351019 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:17 crc kubenswrapper[4921]: I1210 12:57:17.351056 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:17 crc kubenswrapper[4921]: I1210 12:57:17.351066 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:17 crc kubenswrapper[4921]: I1210 12:57:17.351083 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:17 crc kubenswrapper[4921]: I1210 12:57:17.351093 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:17Z","lastTransitionTime":"2025-12-10T12:57:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:17 crc kubenswrapper[4921]: I1210 12:57:17.454745 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:17 crc kubenswrapper[4921]: I1210 12:57:17.454808 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:17 crc kubenswrapper[4921]: I1210 12:57:17.454822 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:17 crc kubenswrapper[4921]: I1210 12:57:17.454847 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:17 crc kubenswrapper[4921]: I1210 12:57:17.454866 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:17Z","lastTransitionTime":"2025-12-10T12:57:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:17 crc kubenswrapper[4921]: I1210 12:57:17.491763 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-86bpd" event={"ID":"709b4982-f2e6-4692-ab1a-c1d5b7d507ad","Type":"ContainerStarted","Data":"5c842f48ca574d23a086e1b248c17102895f4f45897ac87ddcc1f98f170a22bc"} Dec 10 12:57:17 crc kubenswrapper[4921]: I1210 12:57:17.492942 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" Dec 10 12:57:17 crc kubenswrapper[4921]: I1210 12:57:17.493016 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" Dec 10 12:57:17 crc kubenswrapper[4921]: I1210 12:57:17.505420 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jskgz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ad003cc-9fcc-4fc6-86b9-247b30013c0a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8c0dc3ea5672198c430f12ce59b7f2a66100fe52e0f7b4552deba97144250d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m875h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:11Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jskgz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:17Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:17 crc kubenswrapper[4921]: I1210 12:57:17.558517 4921 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:17 crc kubenswrapper[4921]: I1210 12:57:17.558575 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:17 crc kubenswrapper[4921]: I1210 12:57:17.558589 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:17 crc kubenswrapper[4921]: I1210 12:57:17.558613 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:17 crc kubenswrapper[4921]: I1210 12:57:17.558632 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:17Z","lastTransitionTime":"2025-12-10T12:57:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:17 crc kubenswrapper[4921]: I1210 12:57:17.565247 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" Dec 10 12:57:17 crc kubenswrapper[4921]: I1210 12:57:17.567651 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bd245e67c99943297f64701eba8772143dc206caf67849eaf2f9a8e82dab0d26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-10T12:57:17Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:17 crc kubenswrapper[4921]: I1210 12:57:17.586303 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2789e9cd1bca4abecf0939aad4a5f63bdc250a525ad3664bc2440e8b0b7a834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:17Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:17 crc kubenswrapper[4921]: I1210 12:57:17.602718 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zmks6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f2626c5-78df-45d2-8970-c4f99790a0fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d43ebe41a779225842dfa1c4d3be01575113b67ada9be07f553df1514e9dcf85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ft9kj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zmks6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:17Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:17 crc kubenswrapper[4921]: I1210 12:57:17.622254 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pqlx4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://480da3b2621712c4562f9423dc98fdbf17a9dc45365f129777611bc7e934c709\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lhs2m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pqlx4\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:17Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:17 crc kubenswrapper[4921]: I1210 12:57:17.648463 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50684108-04fc-405c-82be-d21d16cd650b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8fd269a96475df9dccf2f7bd0ffae831f397f49232f5c22df67903b9b8b8161e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a88b1b9101bc4ab339d394df337e4e11ec8af98b44b621bcb84eed1a0fba3d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"re
cursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://933c0c81aa0aa2d676a6e404f883a7c81240ef7b07a2e794878c85994d0eb88f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f8f888f214898cb28563da7a77267781622df1f2231c27d1fbdee617ada1ec2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27956424405bdf6223a96b8fd91b5152276a1501c3de2e07dfafc8b3329a6063\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cr
i-o://59a0f3962237d723e5aa9044de1ddce3673ae1fb4c9e5e0478cd41daa661f6d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf05f49208bee2d3d43564c1f88b49d4481b48bbb9afe2108c1eea92e4531305\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"
mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd0025f5be6e68aba73c349dd732281dead920b7d8c2d307b4a67cfdafb99119\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34932b230bb26e6c4b1bdf433827ce608df8658f6fb76140a4f0ac680dc1d43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://34932b230bb26e6c4b1bdf433827ce608df8658f6fb76140a4f0ac680dc1d43f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-m7n89\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:17Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:17 crc kubenswrapper[4921]: I1210 12:57:17.661938 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 10 12:57:17 crc kubenswrapper[4921]: I1210 12:57:17.661994 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:17 crc kubenswrapper[4921]: I1210 12:57:17.662005 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:17 crc kubenswrapper[4921]: I1210 12:57:17.662030 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:17 crc kubenswrapper[4921]: I1210 12:57:17.662044 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:17Z","lastTransitionTime":"2025-12-10T12:57:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:17 crc kubenswrapper[4921]: I1210 12:57:17.678435 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"371fafdc-aa16-4608-aaa2-e419c4ddbc18\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b9a190a657ca03f3fb08626b7af512164ff131b1783b903a02005a111a7036c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57991b0cb6fd4b37082ff5d4eecc6227d77f241e9a983cd3e0eb9db5b485865f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"sta
rted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c24d974446ee70bf587bf3969542cda98f062a9cc78b6af73005d9b8d0a6ee02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5a3f231014293fc0412e577cf9840f62f8db869ea4f0f8bef1bfc5112b38cf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://17a6158acd097054719316d2ad29dc036546d3951bb1e8dd010618f9155270a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://534968b5f5d9e7b3063c91a3e0b68ba04d83e2cb65ab688b23d284adc6852155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":
\\\"cri-o://534968b5f5d9e7b3063c91a3e0b68ba04d83e2cb65ab688b23d284adc6852155\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0acb3ca5fa3945c89412f466b00193354c94ce56dbba608c104d3baf555a2c3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0acb3ca5fa3945c89412f466b00193354c94ce56dbba608c104d3baf555a2c3a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b7d1b714acf0f278cc0310204225d417266a241f1ea827dc625f7b89a7d0ebac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b7d1b714acf0f278cc0310204225d417266a241f1ea827dc625f7b89a7d0ebac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:17Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:17 crc kubenswrapper[4921]: I1210 12:57:17.694208 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"02726135-3050-46a1-a3ab-b2ce46cdb75d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12292f0529bcf32fb33e5accfbd0dfd7d53e377a9ee2046d4ca6efc78fe1c31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a4716beddbcd24e8418830aa5494cffffc21272e45e30bd15cfe58bfc07c543\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f66fe2144cde40619405c04d7d83cbcc2e78503401df428502abad1682d4cb7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4244835c7f038a7c1bf4820de49854350a23fac13c5a252a1553f6508594f10e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:17Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:17 crc kubenswrapper[4921]: I1210 12:57:17.709329 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:17Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:17 crc kubenswrapper[4921]: I1210 12:57:17.725758 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://307b845aae3352df08e2f9fd394f4110a37b2a21650593ebb584c5bf37d01397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3be8a498516e12174c8b5612669fd69deef610c01ed9884a5228cd436bbae3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:17Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:17 crc kubenswrapper[4921]: I1210 12:57:17.743298 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-86bpd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"709b4982-f2e6-4692-ab1a-c1d5b7d507ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c842f48ca574d23a086e1b248c17102895f4f45897ac87ddcc1f98f170a22bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34d9e720fab0818e4cdf1e2a4da042a5648c7c396fedf17b395ad07ececd5c9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://34d9e720fab0818e4cdf1e2a4da042a5648c7c396fedf17b395ad07ececd5c9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"start
edAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04723fc3840c9d632dae527a5afa04fc7eea858426056da3dfe8e72186198ab1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://04723fc3840c9d632dae527a5afa04fc7eea858426056da3dfe8e72186198ab1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://adf25ba213f519cad3c21233c0f3d2a383d978543da8ea1db41bb60dd29f9f3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://adf25ba213f519cad3c21233c0f3d2a383d978543da8ea1db41bb60dd29f9f3e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://baa63608618bc4f059414317df70f14a33321d5aed291adc02a9daac92cf5428\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\"
:\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baa63608618bc4f059414317df70f14a33321d5aed291adc02a9daac92cf5428\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e54a218f367591b87841a39399d5889344b8b92fcc70d77105a0191d3dba37c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e54a218f367591b87841a39399d5889344b8b92fcc70d77105a0191d3dba37c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d90adbff8edcd85eebe4858e412769dff7a05b05bbe7fc533906b55e6ee415e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d90adbff8edcd85eebe4858e412769dff7a05b05bbe7fc533906b55e6ee415e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.1
68.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-86bpd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:17Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:17 crc kubenswrapper[4921]: I1210 12:57:17.757566 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:17Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:17 crc kubenswrapper[4921]: I1210 12:57:17.765279 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:17 crc kubenswrapper[4921]: I1210 12:57:17.765330 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:17 crc kubenswrapper[4921]: I1210 12:57:17.765350 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:17 crc kubenswrapper[4921]: I1210 12:57:17.765374 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:17 crc kubenswrapper[4921]: I1210 12:57:17.765412 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:17Z","lastTransitionTime":"2025-12-10T12:57:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:17 crc kubenswrapper[4921]: I1210 12:57:17.773973 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:17Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:17 crc kubenswrapper[4921]: I1210 12:57:17.786503 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"354355f7-6630-49a8-bdc5-5e875feecb7f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22c45fd7d4d0bb91e995e76a0d813660f9b488a4765e3a21eab2485e1ff03ff3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dbm9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27975eaa70887a1e6ec3bc21ce170bbe5dfe5a05172264be8c8bd343aea02998\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dbm9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-vn2n6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:17Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:17 crc kubenswrapper[4921]: I1210 12:57:17.803847 4921 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f57208b0-80bc-4c1b-bbab-9d2f858972f6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0534394a39803e8a7555e29d0770b5ac7f9197a5f0e03bec4c5460d77fffdd14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6eaca0cb438e61f0856ed7dc64256ccd02aee8dac014d1f5e9cd8aa180c736fb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://692a4c4828dc74b1bfb948f58fab96ee6674030cb9009c72f30f9eae482eb682\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f534d6390920d177e185001b28f7ece42d82a0da922
b4aaf174c271dbe975c50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b39874b20cdccc7903753342421a1f7e13b7e99a2cb699a7c0e44226aebd4f4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"message\\\":\\\"et denominator for mutating requests\\\\\\\" limit=200\\\\nI1210 12:57:01.294872 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1210 12:57:01.294893 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1210 12:57:01.294918 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 12:57:01.294926 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 12:57:01.294932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1210 12:57:01.294934 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1210 12:57:01.294938 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1210 12:57:01.294941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1210 12:57:01.301734 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2446357718/tls.crt::/tmp/serving-cert-2446357718/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1765371405\\\\\\\\\\\\\\\" (2025-12-10 12:56:44 +0000 UTC to 2026-01-09 12:56:45 +0000 UTC (now=2025-12-10 12:57:01.30169166 +0000 UTC))\\\\\\\"\\\\nI1210 12:57:01.301889 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1765371416\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1765371416\\\\\\\\\\\\\\\" (2025-12-10 11:56:55 +0000 UTC to 2026-12-10 11:56:55 +0000 UTC (now=2025-12-10 12:57:01.301865574 +0000 UTC))\\\\\\\"\\\\nI1210 12:57:01.301907 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1210 12:57:01.301934 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nF1210 12:57:01.302850 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e5afbcb1ea81c3f9ec4152ef614a3f07ba1ded75c774c467e968f9c3ee72e33\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bee74fc4c681cc10c5a460c807659272e393e19173109e82ef65371c5b363ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bee74fc4c681cc10c5a460c807659272e393e19173109e82ef65371c5b363ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:17Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:17 crc kubenswrapper[4921]: I1210 12:57:17.831233 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bd245e67c99943297f64701eba8772143dc206caf67849eaf2f9a8e82dab0d26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:17Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:17 crc kubenswrapper[4921]: I1210 12:57:17.868596 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:17 crc kubenswrapper[4921]: I1210 12:57:17.868662 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:17 crc kubenswrapper[4921]: I1210 12:57:17.868679 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:17 crc kubenswrapper[4921]: I1210 12:57:17.868699 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:17 crc kubenswrapper[4921]: I1210 12:57:17.868717 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:17Z","lastTransitionTime":"2025-12-10T12:57:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:17 crc kubenswrapper[4921]: I1210 12:57:17.871791 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jskgz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ad003cc-9fcc-4fc6-86b9-247b30013c0a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8c0dc3ea5672198c430f12ce59b7f2a66100fe52e0f7b4552deba97144250d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m875h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:11Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jskgz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:17Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:17 crc kubenswrapper[4921]: I1210 12:57:17.909104 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zmks6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f2626c5-78df-45d2-8970-c4f99790a0fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d43ebe41a779225842dfa1c4d3be01575113b67ada9be07f553df1514e9dcf85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ft9kj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zmks6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:17Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:17 crc kubenswrapper[4921]: I1210 12:57:17.952262 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pqlx4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://480da3b2621712c4562f9423dc98fdbf17a9dc45365f129777611bc7e934c709\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lhs2m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pqlx4\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:17Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:17 crc kubenswrapper[4921]: I1210 12:57:17.971416 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:17 crc kubenswrapper[4921]: I1210 12:57:17.971713 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:17 crc kubenswrapper[4921]: I1210 12:57:17.971793 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:17 crc kubenswrapper[4921]: I1210 12:57:17.971946 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:17 crc kubenswrapper[4921]: I1210 12:57:17.972023 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:17Z","lastTransitionTime":"2025-12-10T12:57:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:17 crc kubenswrapper[4921]: I1210 12:57:17.993946 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 12:57:17 crc kubenswrapper[4921]: I1210 12:57:17.994123 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 12:57:17 crc kubenswrapper[4921]: E1210 12:57:17.994234 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 12:57:33.99418937 +0000 UTC m=+51.210411334 (durationBeforeRetry 16s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:57:17 crc kubenswrapper[4921]: E1210 12:57:17.994249 4921 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 10 12:57:17 crc kubenswrapper[4921]: E1210 12:57:17.994360 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-10 12:57:33.994344584 +0000 UTC m=+51.210566548 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 10 12:57:17 crc kubenswrapper[4921]: I1210 12:57:17.994520 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 12:57:17 crc kubenswrapper[4921]: E1210 12:57:17.994651 4921 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 10 12:57:17 crc kubenswrapper[4921]: E1210 12:57:17.994727 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-10 12:57:33.994707363 +0000 UTC m=+51.210929297 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 10 12:57:17 crc kubenswrapper[4921]: I1210 12:57:17.997492 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50684108-04fc-405c-82be-d21d16cd650b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8fd269a96475df9dccf2f7bd0ffae831f397f49232f5c22df67903b9b8b8161e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a88b1b9101bc4ab339d394df337e4e11ec8af98b44b621bcb84eed1a0fba3d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metr
ics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://933c0c81aa0aa2d676a6e404f883a7c81240ef7b07a2e794878c85994d0eb88f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f8f888f214898cb28563da7a77267781622df1f2231c27d1fbdee617ada1ec2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27956424405bdf6223a96b8fd91b5152276a1501c3de2e07dfafc8b3329a6063\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Dis
abled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59a0f3962237d723e5aa9044de1ddce3673ae1fb4c9e5e0478cd41daa661f6d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf05f49208bee2d3d43564c1f88b49d4481b48bbb9afe2108c1eea92e4531305\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\
"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd0025f5be6e68aba73c349dd732281dead920b7d8c2d307b4a67cfdafb99119\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34932b230bb26e6c4b1bdf433827ce608df8658f6fb76140a4f0ac680dc1d43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://34932b230bb26e6c4b1bdf433827ce608df8658f6fb76140a4f0ac680dc1d43f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-m7n89\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:17Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:18 crc kubenswrapper[4921]: I1210 12:57:18.031806 4921 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2789e9cd1bca4abecf0939aad4a5f63bdc250a525ad3664bc2440e8b0b7a834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:18Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:18 crc kubenswrapper[4921]: I1210 12:57:18.075084 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:18 crc kubenswrapper[4921]: I1210 12:57:18.075151 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:18 crc kubenswrapper[4921]: I1210 12:57:18.075169 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:18 crc kubenswrapper[4921]: I1210 12:57:18.075197 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:18 crc kubenswrapper[4921]: I1210 12:57:18.075215 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:18Z","lastTransitionTime":"2025-12-10T12:57:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:18 crc kubenswrapper[4921]: I1210 12:57:18.076986 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02726135-3050-46a1-a3ab-b2ce46cdb75d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12292f0529bcf32fb33e5accfbd0dfd7d53e377a9ee2046d4ca6efc78fe1c31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a4716beddbcd24e8418830aa5494cffffc21272e45e30bd15cfe58bfc07c543\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f66fe2144cde40619405c04d7d83cbcc2e78503401df428502abad1682d4cb7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4244835c7f038a7c1bf4820de49854350a23fac13c5a252a1553f6508594f10e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:18Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:18 crc kubenswrapper[4921]: I1210 12:57:18.096417 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 12:57:18 crc kubenswrapper[4921]: I1210 12:57:18.096487 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 12:57:18 crc kubenswrapper[4921]: E1210 12:57:18.096862 4921 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 10 12:57:18 crc kubenswrapper[4921]: E1210 12:57:18.096905 4921 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 10 12:57:18 crc kubenswrapper[4921]: E1210 12:57:18.096912 4921 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 10 12:57:18 crc kubenswrapper[4921]: E1210 12:57:18.096982 4921 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 10 12:57:18 crc kubenswrapper[4921]: E1210 12:57:18.096937 4921 projected.go:194] Error preparing data for projected volume 
kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 10 12:57:18 crc kubenswrapper[4921]: E1210 12:57:18.097006 4921 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 10 12:57:18 crc kubenswrapper[4921]: E1210 12:57:18.097120 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-10 12:57:34.097089489 +0000 UTC m=+51.313311453 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 10 12:57:18 crc kubenswrapper[4921]: E1210 12:57:18.097165 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-10 12:57:34.09715147 +0000 UTC m=+51.313373434 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 10 12:57:18 crc kubenswrapper[4921]: I1210 12:57:18.109919 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:18Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:18 crc kubenswrapper[4921]: I1210 12:57:18.154264 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://307b845aae3352df08e2f9fd394f4110a37b2a21650593ebb584c5bf37d01397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3be8a498516e12174c8b5612669fd69deef610c01ed9884a5228cd436bbae3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:18Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:18 crc kubenswrapper[4921]: I1210 12:57:18.178907 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:18 crc kubenswrapper[4921]: I1210 12:57:18.178952 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:18 crc kubenswrapper[4921]: I1210 12:57:18.178962 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:18 crc kubenswrapper[4921]: I1210 12:57:18.178980 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:18 crc kubenswrapper[4921]: I1210 12:57:18.178991 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:18Z","lastTransitionTime":"2025-12-10T12:57:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:18 crc kubenswrapper[4921]: I1210 12:57:18.192286 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 12:57:18 crc kubenswrapper[4921]: I1210 12:57:18.192312 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 12:57:18 crc kubenswrapper[4921]: E1210 12:57:18.192807 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 12:57:18 crc kubenswrapper[4921]: E1210 12:57:18.192884 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 12:57:18 crc kubenswrapper[4921]: I1210 12:57:18.192444 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 12:57:18 crc kubenswrapper[4921]: E1210 12:57:18.193025 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 12:57:18 crc kubenswrapper[4921]: I1210 12:57:18.198368 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-86bpd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"709b4982-f2e6-4692-ab1a-c1d5b7d507ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c842f48ca574d23a086e1b248c17102895f4f45897ac87ddcc1f98f170a22bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34d9e720fab0818e4cdf1e2a4da042a5648c7c396fedf17b395ad07ececd5c9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://34d9e720fab0818e4cdf1e2a4da042a5648c7c396fedf17b395ad07ececd5c9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"c
ni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04723fc3840c9d632dae527a5afa04fc7eea858426056da3dfe8e72186198ab1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://04723fc3840c9d632dae527a5afa04fc7eea858426056da3dfe8e72186198ab1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://adf25ba213f519cad3c21233c0f3d2a383d978543da8ea1db41bb60dd29f9f3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://adf25ba213f519cad3c21233c0f3d2a383d978543da8ea1db41bb60dd29f9f3e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://baa63608618bc4f059414317df70f14a33321d5aed291adc02a9daac92cf5428\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388
e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baa63608618bc4f059414317df70f14a33321d5aed291adc02a9daac92cf5428\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e54a218f367591b87841a39399d5889344b8b92fcc70d77105a0191d3dba37c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e54a218f367591b87841a39399d5889344b8b92fcc70d77105a0191d3dba37c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d90adbff8edcd85eebe4858e412769dff7a05b05bbe7fc533906b55e6ee415e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d90adbff8edcd85eebe4858e412769dff7a05b05bbe7fc533906b55e6ee415e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" 
for pod \"openshift-multus\"/\"multus-additional-cni-plugins-86bpd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:18Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:18 crc kubenswrapper[4921]: I1210 12:57:18.240038 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"371fafdc-aa16-4608-aaa2-e419c4ddbc18\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b9a190a657ca03f3fb08626b7af512164ff131b1783b903a02005a111a7036c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57991b0cb6fd4b37082ff5d4eecc6227d77f241e9a983cd3e0eb9db5b485865f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c24d974446ee70bf587bf3969542cda98f062a9cc78b6af73005d9b8d0a6ee02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/ope
nshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5a3f231014293fc0412e577cf9840f62f8db869ea4f0f8bef1bfc5112b38cf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://17a6158acd097054719316d2ad29dc036546d3951bb1e8dd010618f9155270a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://534968b5f5d9e7b3063c91a3e0b68ba04d83e2cb65ab688b23d284adc6852155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://534968b5f5d9e7b3063c91a3e0b68ba04d83e2cb65ab688b23d284adc6852155\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0acb3ca5fa3945c89412f466b00193354c94ce56dbba608c104d3baf555a2c3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ec
d6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0acb3ca5fa3945c89412f466b00193354c94ce56dbba608c104d3baf555a2c3a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b7d1b714acf0f278cc0310204225d417266a241f1ea827dc625f7b89a7d0ebac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b7d1b714acf0f278cc0310204225d417266a241f1ea827dc625f7b89a7d0ebac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:18Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:18 crc kubenswrapper[4921]: I1210 12:57:18.274323 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:18Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:18 crc kubenswrapper[4921]: I1210 12:57:18.281967 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:18 crc kubenswrapper[4921]: I1210 12:57:18.282016 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:18 crc kubenswrapper[4921]: I1210 12:57:18.282032 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:18 crc kubenswrapper[4921]: I1210 12:57:18.282052 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:18 crc kubenswrapper[4921]: I1210 12:57:18.282068 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:18Z","lastTransitionTime":"2025-12-10T12:57:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:18 crc kubenswrapper[4921]: I1210 12:57:18.309802 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"354355f7-6630-49a8-bdc5-5e875feecb7f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22c45fd7d4d0bb91e995e76a0d813660f9b488a4765e3a21eab2485e1ff03ff3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dbm9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27975eaa70887a1e6ec3bc21ce170bbe5dfe5a05172264be8c8bd343aea02998\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dbm9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-vn2n6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:18Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:18 crc kubenswrapper[4921]: I1210 12:57:18.350417 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f57208b0-80bc-4c1b-bbab-9d2f858972f6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0534394a39803e8a7555e29d0770b5ac7f9197a5f0e03bec4c5460d77fffdd14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6eaca0cb438e61f0856ed7dc64256ccd02aee8dac014d1f5e9cd8aa180c736fb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://692a4c4828dc74b1bfb948f58fab96ee6674030cb9009c72f30f9eae482eb682\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z
\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f534d6390920d177e185001b28f7ece42d82a0da922b4aaf174c271dbe975c50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b39874b20cdccc7903753342421a1f7e13b7e99a2cb699a7c0e44226aebd4f4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"message\\\":\\\"et denominator for mutating requests\\\\\\\" limit=200\\\\nI1210 12:57:01.294872 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1210 12:57:01.294893 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1210 12:57:01.294918 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 12:57:01.294926 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 12:57:01.294932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1210 12:57:01.294934 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1210 12:57:01.294938 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1210 12:57:01.294941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1210 12:57:01.301734 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2446357718/tls.crt::/tmp/serving-cert-2446357718/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1765371405\\\\\\\\\\\\\\\" (2025-12-10 12:56:44 +0000 UTC to 2026-01-09 12:56:45 +0000 UTC (now=2025-12-10 12:57:01.30169166 +0000 UTC))\\\\\\\"\\\\nI1210 12:57:01.301889 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1765371416\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1765371416\\\\\\\\\\\\\\\" (2025-12-10 11:56:55 +0000 UTC to 2026-12-10 11:56:55 +0000 UTC (now=2025-12-10 12:57:01.301865574 +0000 UTC))\\\\\\\"\\\\nI1210 12:57:01.301907 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1210 12:57:01.301934 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nF1210 12:57:01.302850 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e5afbcb1ea81c3f9ec4152ef614a3f07ba1ded75c774c467e968f9c3ee72e33\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bee74fc4c681cc10c5a460c807659272e393e19173109e82ef65371c5b363ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bee74fc4c681cc10c5a460c807659272e393e19173109e82ef65371c5b363ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:18Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:18 crc kubenswrapper[4921]: I1210 12:57:18.384619 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:18 crc kubenswrapper[4921]: I1210 12:57:18.384664 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:18 crc kubenswrapper[4921]: I1210 12:57:18.384674 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:18 crc kubenswrapper[4921]: I1210 12:57:18.384696 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:18 crc kubenswrapper[4921]: I1210 12:57:18.384708 4921 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:18Z","lastTransitionTime":"2025-12-10T12:57:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:18 crc kubenswrapper[4921]: I1210 12:57:18.400080 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:18Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:18 crc kubenswrapper[4921]: I1210 12:57:18.487925 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:18 crc kubenswrapper[4921]: I1210 12:57:18.488217 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:18 crc kubenswrapper[4921]: I1210 12:57:18.488417 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:18 crc kubenswrapper[4921]: I1210 12:57:18.488750 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:18 crc kubenswrapper[4921]: I1210 12:57:18.488862 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:18Z","lastTransitionTime":"2025-12-10T12:57:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:18 crc kubenswrapper[4921]: I1210 12:57:18.592280 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:18 crc kubenswrapper[4921]: I1210 12:57:18.592337 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:18 crc kubenswrapper[4921]: I1210 12:57:18.592349 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:18 crc kubenswrapper[4921]: I1210 12:57:18.592370 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:18 crc kubenswrapper[4921]: I1210 12:57:18.592398 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:18Z","lastTransitionTime":"2025-12-10T12:57:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:18 crc kubenswrapper[4921]: I1210 12:57:18.695520 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:18 crc kubenswrapper[4921]: I1210 12:57:18.695578 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:18 crc kubenswrapper[4921]: I1210 12:57:18.695595 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:18 crc kubenswrapper[4921]: I1210 12:57:18.695625 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:18 crc kubenswrapper[4921]: I1210 12:57:18.695641 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:18Z","lastTransitionTime":"2025-12-10T12:57:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:18 crc kubenswrapper[4921]: I1210 12:57:18.797781 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:18 crc kubenswrapper[4921]: I1210 12:57:18.797840 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:18 crc kubenswrapper[4921]: I1210 12:57:18.797853 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:18 crc kubenswrapper[4921]: I1210 12:57:18.797874 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:18 crc kubenswrapper[4921]: I1210 12:57:18.797886 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:18Z","lastTransitionTime":"2025-12-10T12:57:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:18 crc kubenswrapper[4921]: I1210 12:57:18.900549 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:18 crc kubenswrapper[4921]: I1210 12:57:18.900606 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:18 crc kubenswrapper[4921]: I1210 12:57:18.900617 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:18 crc kubenswrapper[4921]: I1210 12:57:18.900638 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:18 crc kubenswrapper[4921]: I1210 12:57:18.900657 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:18Z","lastTransitionTime":"2025-12-10T12:57:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:19 crc kubenswrapper[4921]: I1210 12:57:19.003693 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:19 crc kubenswrapper[4921]: I1210 12:57:19.003739 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:19 crc kubenswrapper[4921]: I1210 12:57:19.003750 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:19 crc kubenswrapper[4921]: I1210 12:57:19.003770 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:19 crc kubenswrapper[4921]: I1210 12:57:19.003782 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:19Z","lastTransitionTime":"2025-12-10T12:57:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:19 crc kubenswrapper[4921]: I1210 12:57:19.106439 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:19 crc kubenswrapper[4921]: I1210 12:57:19.106493 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:19 crc kubenswrapper[4921]: I1210 12:57:19.106506 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:19 crc kubenswrapper[4921]: I1210 12:57:19.106533 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:19 crc kubenswrapper[4921]: I1210 12:57:19.106552 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:19Z","lastTransitionTime":"2025-12-10T12:57:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:19 crc kubenswrapper[4921]: I1210 12:57:19.208736 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:19 crc kubenswrapper[4921]: I1210 12:57:19.208789 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:19 crc kubenswrapper[4921]: I1210 12:57:19.208801 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:19 crc kubenswrapper[4921]: I1210 12:57:19.208821 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:19 crc kubenswrapper[4921]: I1210 12:57:19.208833 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:19Z","lastTransitionTime":"2025-12-10T12:57:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:19 crc kubenswrapper[4921]: I1210 12:57:19.312031 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:19 crc kubenswrapper[4921]: I1210 12:57:19.312085 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:19 crc kubenswrapper[4921]: I1210 12:57:19.312096 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:19 crc kubenswrapper[4921]: I1210 12:57:19.312116 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:19 crc kubenswrapper[4921]: I1210 12:57:19.312128 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:19Z","lastTransitionTime":"2025-12-10T12:57:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:19 crc kubenswrapper[4921]: I1210 12:57:19.414936 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:19 crc kubenswrapper[4921]: I1210 12:57:19.414976 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:19 crc kubenswrapper[4921]: I1210 12:57:19.414989 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:19 crc kubenswrapper[4921]: I1210 12:57:19.415008 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:19 crc kubenswrapper[4921]: I1210 12:57:19.415022 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:19Z","lastTransitionTime":"2025-12-10T12:57:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:19 crc kubenswrapper[4921]: I1210 12:57:19.500513 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-m7n89_50684108-04fc-405c-82be-d21d16cd650b/ovnkube-controller/0.log" Dec 10 12:57:19 crc kubenswrapper[4921]: I1210 12:57:19.503557 4921 generic.go:334] "Generic (PLEG): container finished" podID="50684108-04fc-405c-82be-d21d16cd650b" containerID="bf05f49208bee2d3d43564c1f88b49d4481b48bbb9afe2108c1eea92e4531305" exitCode=1 Dec 10 12:57:19 crc kubenswrapper[4921]: I1210 12:57:19.503625 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" event={"ID":"50684108-04fc-405c-82be-d21d16cd650b","Type":"ContainerDied","Data":"bf05f49208bee2d3d43564c1f88b49d4481b48bbb9afe2108c1eea92e4531305"} Dec 10 12:57:19 crc kubenswrapper[4921]: I1210 12:57:19.504923 4921 scope.go:117] "RemoveContainer" containerID="bf05f49208bee2d3d43564c1f88b49d4481b48bbb9afe2108c1eea92e4531305" Dec 10 12:57:19 crc kubenswrapper[4921]: I1210 12:57:19.517588 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:19 crc kubenswrapper[4921]: I1210 12:57:19.517630 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:19 crc kubenswrapper[4921]: I1210 12:57:19.517641 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:19 crc kubenswrapper[4921]: I1210 12:57:19.517657 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:19 crc kubenswrapper[4921]: I1210 12:57:19.517668 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:19Z","lastTransitionTime":"2025-12-10T12:57:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:19 crc kubenswrapper[4921]: I1210 12:57:19.525571 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2789e9cd1bca4abecf0939aad4a5f63bdc250a525ad3664bc2440e8b0b7a834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:19Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:19 crc kubenswrapper[4921]: I1210 12:57:19.538867 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zmks6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f2626c5-78df-45d2-8970-c4f99790a0fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d43ebe41a779225842dfa1c4d3be01575113b67ada9be07f553df1514e9dcf85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ft9kj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zmks6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:19Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:19 crc kubenswrapper[4921]: I1210 12:57:19.552594 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pqlx4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://480da3b2621712c4562f9423dc98fdbf17a9dc45365f129777611bc7e934c709\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lhs2m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pqlx4\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:19Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:19 crc kubenswrapper[4921]: I1210 12:57:19.572667 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50684108-04fc-405c-82be-d21d16cd650b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8fd269a96475df9dccf2f7bd0ffae831f397f49232f5c22df67903b9b8b8161e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a88b1b9101bc4ab339d394df337e4e11ec8af98b44b621bcb84eed1a0fba3d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://933c0c81aa0aa2d676a6e404f883a7c81240ef7b07a2e794878c85994d0eb88f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f8f888f214898cb28563da7a77267781622df1f2231c27d1fbdee617ada1ec2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27956424405bdf6223a96b8fd91b5152276a1501c3de2e07dfafc8b3329a6063\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59a0f
3962237d723e5aa9044de1ddce3673ae1fb4c9e5e0478cd41daa661f6d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf05f49208bee2d3d43564c1f88b49d4481b48bbb9afe2108c1eea92e4531305\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bf05f49208bee2d3d43564c1f88b49d4481b48bbb9afe2108c1eea92e4531305\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-10T12:57:19Z\\\",\\\"message\\\":\\\":57:18.800406 6129 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1210 12:57:18.800433 6129 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1210 12:57:18.800441 6129 factory.go:656] Stopping watch factory\\\\nI1210 12:57:18.800465 6129 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1210 12:57:18.800466 6129 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1210 12:57:18.800662 6129 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1210 12:57:18.801063 6129 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1210 12:57:18.801086 6129 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1210 12:57:18.801163 6129 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1210 12:57:18.801631 6129 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1210 12:57:18.801721 6129 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1210 12:57:18.801837 6129 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from 
k8s.io/client-go/informers/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd0025f5be6e68aba73c349dd732281dead920b7d8c2d307b4a67cfdafb99119\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34932b230bb26e6c4b1bdf433827ce608df8658f6fb76140a4f0ac680dc1d43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@
sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://34932b230bb26e6c4b1bdf433827ce608df8658f6fb76140a4f0ac680dc1d43f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-m7n89\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:19Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:19 crc kubenswrapper[4921]: I1210 12:57:19.595070 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"371fafdc-aa16-4608-aaa2-e419c4ddbc18\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b9a190a657ca03f3fb08626b7af512164ff131b1783b903a02005a111a7036c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57991b0cb6fd4b37082ff5d4eecc6227d77f241e9a983
cd3e0eb9db5b485865f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c24d974446ee70bf587bf3969542cda98f062a9cc78b6af73005d9b8d0a6ee02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5a3f231014293fc0412e577cf9840f62f8db869ea4f0f8bef1bfc5112b38cf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://17a6158acd097054719316d2ad29dc036546d3951bb1e8dd010618f9155270a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://534968b5f5d9e7b3063c91a3e0b68ba04d83e2cb65ab688b23d284adc6852155\\\",\\\"image\\\":\\\"quay.io/openshift-rel
ease-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://534968b5f5d9e7b3063c91a3e0b68ba04d83e2cb65ab688b23d284adc6852155\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0acb3ca5fa3945c89412f466b00193354c94ce56dbba608c104d3baf555a2c3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0acb3ca5fa3945c89412f466b00193354c94ce56dbba608c104d3baf555a2c3a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b7d1b714acf0f278cc0310204225d417266a241f1ea827dc625f7b89a7d0ebac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b7d1b714acf0f278cc0310204225d417266a241f1ea827dc625f7b89a7d0ebac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:19Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:19 crc kubenswrapper[4921]: I1210 12:57:19.611608 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"02726135-3050-46a1-a3ab-b2ce46cdb75d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12292f0529bcf32fb33e5accfbd0dfd7d53e377a9ee2046d4ca6efc78fe1c31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a4716beddbcd24e8418830aa5494cffffc21272e45e30bd15cfe58bfc07c543\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f66fe2144cde40619405c04d7d83cbcc2e78503401df428502abad1682d4cb7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4244835c7f038a7c1bf4820de49854350a23fac13c5a252a1553f6508594f10e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:19Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:19 crc kubenswrapper[4921]: I1210 12:57:19.621216 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:19 crc kubenswrapper[4921]: I1210 12:57:19.621256 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:19 crc kubenswrapper[4921]: I1210 12:57:19.621267 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:19 crc kubenswrapper[4921]: I1210 12:57:19.621282 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:19 crc kubenswrapper[4921]: I1210 12:57:19.621296 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:19Z","lastTransitionTime":"2025-12-10T12:57:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:19 crc kubenswrapper[4921]: I1210 12:57:19.629239 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:19Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:19 crc kubenswrapper[4921]: I1210 12:57:19.646811 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://307b845aae3352df08e2f9fd394f4110a37b2a21650593ebb584c5bf37d01397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3be8a498516e12174c8b5612669fd69deef610c01ed9884a5228cd436bbae3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:19Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:19 crc kubenswrapper[4921]: I1210 12:57:19.662351 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-86bpd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"709b4982-f2e6-4692-ab1a-c1d5b7d507ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c842f48ca574d23a086e1b248c17102895f4f45897ac87ddcc1f98f170a22bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34d9e720fab0818e4cdf1e2a4da042a5648c7c396fedf17b395ad07ececd5c9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://34d9e720fab0818e4cdf1e2a4da042a5648c7c396fedf17b395ad07ececd5c9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04723fc3840c9d632dae527a5afa04fc7eea858426056da3dfe8e72186198ab1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://04723fc3840c9d632dae527a5afa04fc7eea858426056da3dfe8e72186198ab1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://adf25ba213f519cad3c21233c0f3d2a383d978543da8ea1db41bb60dd29f9f3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://adf25ba213f519cad3c21233c0f3d2a383d978543da8ea1db41bb60dd29f9f3e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://baa63608618bc4f059414317df70f14a33321d5aed291adc02a9daac92cf5428\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baa63608618bc4f059414317df70f14a33321d5aed291adc02a9daac92cf5428\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e54a218f367591b87841a39399d5889344b8b92fcc70d77105a0191d3dba37c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e54a218f367591b87841a39399d5889344b8b92fcc70d77105a0191d3dba37c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d90adbff8edcd85eebe4858e412769dff7a05b05bbe7fc533906b55e6ee415e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d90adbff8edcd85eebe4858e412769dff7a05b05bbe7fc533906b55e6ee415e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-86bpd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:19Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:19 crc kubenswrapper[4921]: I1210 12:57:19.679260 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f57208b0-80bc-4c1b-bbab-9d2f858972f6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0534394a39803e8a7555e29d0770b5ac7f9197a5f0e03bec4c5460d77fffdd14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6eaca0cb438e61f0856ed7dc64256ccd02aee8dac014d1f5e9cd8aa180c736fb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://692a4c4828dc74b1bfb948f58fab96ee6674030cb9009c72f30f9eae482eb682\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f534d6390920d177e185001b28f7ece42d82a0da922b4aaf174c271dbe975c50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b39874b20cdccc7903753342421a1f7e13b7e99a2cb699a7c0e44226aebd4f4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"message\\\":\\\"et denominator for mutating requests\\\\\\\" limit=200\\\\nI1210 12:57:01.294872 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1210 12:57:01.294893 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1210 12:57:01.294918 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 12:57:01.294926 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 12:57:01.294932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1210 12:57:01.294934 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1210 12:57:01.294938 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1210 12:57:01.294941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1210 12:57:01.301734 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2446357718/tls.crt::/tmp/serving-cert-2446357718/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1765371405\\\\\\\\\\\\\\\" (2025-12-10 12:56:44 +0000 UTC to 2026-01-09 12:56:45 +0000 UTC (now=2025-12-10 12:57:01.30169166 +0000 UTC))\\\\\\\"\\\\nI1210 12:57:01.301889 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1765371416\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1765371416\\\\\\\\\\\\\\\" (2025-12-10 11:56:55 +0000 UTC to 2026-12-10 11:56:55 +0000 UTC (now=2025-12-10 12:57:01.301865574 +0000 UTC))\\\\\\\"\\\\nI1210 12:57:01.301907 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1210 12:57:01.301934 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nF1210 12:57:01.302850 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e5afbcb1ea81c3f9ec4152ef614a3f07ba1ded75c774c467e968f9c3ee72e33\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bee74fc4c681cc10c5a460c807659272e393e19173109e82ef65371c5b363ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bee74fc4c681cc10c5a460c807659272e393e19173109e82ef65371c5b363ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:19Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:19 crc kubenswrapper[4921]: I1210 12:57:19.692543 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:19Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:19 crc kubenswrapper[4921]: I1210 12:57:19.709995 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:19Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:19 crc kubenswrapper[4921]: I1210 12:57:19.723144 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"354355f7-6630-49a8-bdc5-5e875feecb7f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22c45fd7d4d0bb91e995e76a0d813660f9b488a4765e3a21eab2485e1ff03ff3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dbm9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27975eaa70887a1e6ec3bc21ce170bbe5dfe5a05172264be8c8bd343aea02998\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dbm9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-vn2n6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:19Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:19 crc kubenswrapper[4921]: I1210 12:57:19.724521 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:19 crc kubenswrapper[4921]: I1210 12:57:19.724570 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:19 crc kubenswrapper[4921]: I1210 12:57:19.724584 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:19 crc kubenswrapper[4921]: I1210 12:57:19.724607 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:19 crc kubenswrapper[4921]: I1210 12:57:19.724621 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:19Z","lastTransitionTime":"2025-12-10T12:57:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:19 crc kubenswrapper[4921]: I1210 12:57:19.744263 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bd245e67c99943297f64701eba8772143dc206caf67849eaf2f9a8e82dab0d26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:19Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:19 crc kubenswrapper[4921]: I1210 12:57:19.757577 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jskgz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ad003cc-9fcc-4fc6-86b9-247b30013c0a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8c0dc3ea5672198c430f12ce59b7f2a66100fe52e0f7b4552deba97144250d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m875h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:11Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jskgz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:19Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:19 crc kubenswrapper[4921]: I1210 12:57:19.828158 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:19 crc kubenswrapper[4921]: I1210 12:57:19.828229 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:19 crc kubenswrapper[4921]: I1210 12:57:19.828247 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:19 crc kubenswrapper[4921]: I1210 12:57:19.828272 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:19 crc kubenswrapper[4921]: I1210 12:57:19.828291 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:19Z","lastTransitionTime":"2025-12-10T12:57:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:19 crc kubenswrapper[4921]: I1210 12:57:19.931380 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:19 crc kubenswrapper[4921]: I1210 12:57:19.931505 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:19 crc kubenswrapper[4921]: I1210 12:57:19.931526 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:19 crc kubenswrapper[4921]: I1210 12:57:19.931557 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:19 crc kubenswrapper[4921]: I1210 12:57:19.931576 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:19Z","lastTransitionTime":"2025-12-10T12:57:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:20 crc kubenswrapper[4921]: I1210 12:57:20.035284 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:20 crc kubenswrapper[4921]: I1210 12:57:20.035353 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:20 crc kubenswrapper[4921]: I1210 12:57:20.035371 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:20 crc kubenswrapper[4921]: I1210 12:57:20.035427 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:20 crc kubenswrapper[4921]: I1210 12:57:20.035447 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:20Z","lastTransitionTime":"2025-12-10T12:57:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:20 crc kubenswrapper[4921]: I1210 12:57:20.139503 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:20 crc kubenswrapper[4921]: I1210 12:57:20.139603 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:20 crc kubenswrapper[4921]: I1210 12:57:20.139626 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:20 crc kubenswrapper[4921]: I1210 12:57:20.139657 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:20 crc kubenswrapper[4921]: I1210 12:57:20.139680 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:20Z","lastTransitionTime":"2025-12-10T12:57:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:20 crc kubenswrapper[4921]: I1210 12:57:20.192376 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 12:57:20 crc kubenswrapper[4921]: I1210 12:57:20.192511 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 12:57:20 crc kubenswrapper[4921]: I1210 12:57:20.192560 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 12:57:20 crc kubenswrapper[4921]: E1210 12:57:20.192626 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 12:57:20 crc kubenswrapper[4921]: E1210 12:57:20.192720 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 12:57:20 crc kubenswrapper[4921]: E1210 12:57:20.192941 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 12:57:20 crc kubenswrapper[4921]: I1210 12:57:20.243687 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:20 crc kubenswrapper[4921]: I1210 12:57:20.243769 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:20 crc kubenswrapper[4921]: I1210 12:57:20.243793 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:20 crc kubenswrapper[4921]: I1210 12:57:20.243824 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:20 crc kubenswrapper[4921]: I1210 12:57:20.243845 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:20Z","lastTransitionTime":"2025-12-10T12:57:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:20 crc kubenswrapper[4921]: I1210 12:57:20.347574 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:20 crc kubenswrapper[4921]: I1210 12:57:20.347632 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:20 crc kubenswrapper[4921]: I1210 12:57:20.347646 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:20 crc kubenswrapper[4921]: I1210 12:57:20.347666 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:20 crc kubenswrapper[4921]: I1210 12:57:20.347681 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:20Z","lastTransitionTime":"2025-12-10T12:57:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:20 crc kubenswrapper[4921]: I1210 12:57:20.451801 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:20 crc kubenswrapper[4921]: I1210 12:57:20.451864 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:20 crc kubenswrapper[4921]: I1210 12:57:20.451882 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:20 crc kubenswrapper[4921]: I1210 12:57:20.451906 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:20 crc kubenswrapper[4921]: I1210 12:57:20.451930 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:20Z","lastTransitionTime":"2025-12-10T12:57:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:20 crc kubenswrapper[4921]: I1210 12:57:20.511844 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-m7n89_50684108-04fc-405c-82be-d21d16cd650b/ovnkube-controller/0.log" Dec 10 12:57:20 crc kubenswrapper[4921]: I1210 12:57:20.516191 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" event={"ID":"50684108-04fc-405c-82be-d21d16cd650b","Type":"ContainerStarted","Data":"c7cae279c3cb664f0de50f9e6f8e88378d8878555863051da2626693c4337cff"} Dec 10 12:57:20 crc kubenswrapper[4921]: I1210 12:57:20.516882 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" Dec 10 12:57:20 crc kubenswrapper[4921]: I1210 12:57:20.533670 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://307b845aae3352df08e2f9fd394f4110a37b2a21650593ebb584c5bf37d01397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3be8a498516e12174c8b5612669fd69deef610c01ed9884a5228cd436bbae3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:20Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:20 crc kubenswrapper[4921]: I1210 12:57:20.555369 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:20 crc kubenswrapper[4921]: I1210 12:57:20.555465 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:20 crc kubenswrapper[4921]: I1210 12:57:20.555488 4921 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 10 12:57:20 crc kubenswrapper[4921]: I1210 12:57:20.555519 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:20 crc kubenswrapper[4921]: I1210 12:57:20.555544 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:20Z","lastTransitionTime":"2025-12-10T12:57:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:20 crc kubenswrapper[4921]: I1210 12:57:20.559230 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-86bpd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"709b4982-f2e6-4692-ab1a-c1d5b7d507ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c842f48ca574d23a086e1b248c17102895f4f45897ac87ddcc1f98f170a22bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34d9e720fab0818e4cdf1e2a4da042a5648c7c396fedf17b395ad07ececd5c9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://34d9e720fab0818e4cdf1e2a4da042a5648c7c396fedf17b395ad07ececd5c9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"reason\\\
":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04723fc3840c9d632dae527a5afa04fc7eea858426056da3dfe8e72186198ab1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://04723fc3840c9d632dae527a5afa04fc7eea858426056da3dfe8e72186198ab1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://adf25ba213f519cad3c21233c0f3d2a383d978543da8ea1db41bb60dd29f9f3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://adf25ba213f519cad3c21233c0f3d2a383d978543da8ea1db41bb60dd29f9f3e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://baa63608618bc4f059414317df70f14a33321d5aed291adc02a9daac92cf5428\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d
0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baa63608618bc4f059414317df70f14a33321d5aed291adc02a9daac92cf5428\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e54a218f367591b87841a39399d5889344b8b92fcc70d77105a0191d3dba37c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e54a218f367591b87841a39399d5889344b8b92fcc70d77105a0191d3dba37c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d90adbff8edcd85eebe4858e412769dff7a05b05bbe7fc533906b55e6ee415e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d90adbff8edcd85eebe4858e412769dff7a05b05bbe7fc533906b55e6ee415e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Runnin
g\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-86bpd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:20Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:20 crc kubenswrapper[4921]: I1210 12:57:20.602480 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"371fafdc-aa16-4608-aaa2-e419c4ddbc18\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b9a190a657ca03f3fb08626b7af512164ff131b1783b903a02005a111a7036c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57991b0cb6fd4b37082ff5d4eecc6227d77f241e9a983cd3e0eb9db5b485865f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c24d974446ee70bf587bf3969542cda98f062a9cc78b6af73005d9b8d0a6ee02\\\",\\\"image\\\":\\\"quay.io
/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5a3f231014293fc0412e577cf9840f62f8db869ea4f0f8bef1bfc5112b38cf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://17a6158acd097054719316d2ad29dc036546d3951bb1e8dd010618f9155270a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://534968b5f5d9e7b3063c91a3e0b68ba04d83e2cb65ab688b23d284adc6852155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://534968b5f5d9e7b3063c91a3e0b68ba04d83e2cb65ab688b23d284adc6852155\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0acb3ca5fa3945c89412f466b00193354c94ce56dbba608c104d3baf555a2c3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c68774
41ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0acb3ca5fa3945c89412f466b00193354c94ce56dbba608c104d3baf555a2c3a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b7d1b714acf0f278cc0310204225d417266a241f1ea827dc625f7b89a7d0ebac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b7d1b714acf0f278cc0310204225d417266a241f1ea827dc625f7b89a7d0ebac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:20Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:20 crc kubenswrapper[4921]: I1210 12:57:20.620365 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"02726135-3050-46a1-a3ab-b2ce46cdb75d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12292f0529bcf32fb33e5accfbd0dfd7d53e377a9ee2046d4ca6efc78fe1c31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a4716beddbcd24e8418830aa5494cffffc21272e45e30bd15cfe58bfc07c543\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f66fe2144cde40619405c04d7d83cbcc2e78503401df428502abad1682d4cb7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4244835c7f038a7c1bf4820de49854350a23fac13c5a252a1553f6508594f10e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:20Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:20 crc kubenswrapper[4921]: I1210 12:57:20.638005 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:20Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:20 crc kubenswrapper[4921]: I1210 12:57:20.653449 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f57208b0-80bc-4c1b-bbab-9d2f858972f6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0534394a39803e8a7555e29d0770b5ac7f9197a5f0e03bec4c5460d77fffdd14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6eaca0cb438e61f0856ed7dc64256ccd02aee8dac014d1f5e9cd8aa180c736fb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}
},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://692a4c4828dc74b1bfb948f58fab96ee6674030cb9009c72f30f9eae482eb682\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f534d6390920d177e185001b28f7ece42d82a0da922b4aaf174c271dbe975c50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b39874b20cdccc7903753342421a1f7e13b7e99a2cb699a7c0e44226aebd4f4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"message\\\":\\\"et denominator for mutating requests\\\\\\\" limit=200\\\\nI1210 12:57:01.294872 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1210 12:57:01.294893 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1210 12:57:01.294918 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 12:57:01.294926 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 12:57:01.294932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1210 12:57:01.294934 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1210 12:57:01.294938 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1210 12:57:01.294941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1210 12:57:01.301734 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2446357718/tls.crt::/tmp/serving-cert-2446357718/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1765371405\\\\\\\\\\\\\\\" (2025-12-10 12:56:44 +0000 UTC to 2026-01-09 12:56:45 +0000 UTC (now=2025-12-10 12:57:01.30169166 +0000 UTC))\\\\\\\"\\\\nI1210 12:57:01.301889 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1765371416\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1765371416\\\\\\\\\\\\\\\" (2025-12-10 11:56:55 +0000 UTC to 2026-12-10 11:56:55 +0000 UTC (now=2025-12-10 12:57:01.301865574 +0000 
UTC))\\\\\\\"\\\\nI1210 12:57:01.301907 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1210 12:57:01.301934 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nF1210 12:57:01.302850 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e5afbcb1ea81c3f9ec4152ef614a3f07ba1ded75c774c467e968f9c3ee72e33\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bee74fc4c681cc10c5a460c807659272e393e19173109e82ef65371c5b363ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bee74fc4c681cc10c5a460c807659272e393e19173109e82ef65371c5b363ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:20Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:20 crc kubenswrapper[4921]: I1210 12:57:20.658882 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:20 crc kubenswrapper[4921]: I1210 12:57:20.658937 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:20 crc kubenswrapper[4921]: I1210 12:57:20.658955 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:20 crc kubenswrapper[4921]: I1210 
12:57:20.658978 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:20 crc kubenswrapper[4921]: I1210 12:57:20.658995 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:20Z","lastTransitionTime":"2025-12-10T12:57:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:20 crc kubenswrapper[4921]: I1210 12:57:20.671037 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:20Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:20 crc kubenswrapper[4921]: I1210 12:57:20.688772 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:20Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:20 crc kubenswrapper[4921]: I1210 12:57:20.703549 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"354355f7-6630-49a8-bdc5-5e875feecb7f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22c45fd7d4d0bb91e995e76a0d813660f9b488a4765e3a21eab2485e1ff03ff3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dbm9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27975eaa70887a1e6ec3bc21ce170bbe5dfe5a05172264be8c8bd343aea02998\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dbm9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-vn2n6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:20Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:20 crc kubenswrapper[4921]: I1210 12:57:20.719930 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bd245e67c99943297f64701eba8772143dc206caf67849eaf2f9a8e82dab0d26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:20Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:20 crc kubenswrapper[4921]: I1210 12:57:20.739536 4921 
status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jskgz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ad003cc-9fcc-4fc6-86b9-247b30013c0a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8c0dc3ea5672198c430f12ce59b7f2a66100fe52e0f7b4552deba97144250d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m875h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:11Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jskgz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:20Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:20 crc kubenswrapper[4921]: I1210 12:57:20.761867 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:20 crc kubenswrapper[4921]: I1210 12:57:20.761921 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:20 crc kubenswrapper[4921]: I1210 12:57:20.761934 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:20 crc kubenswrapper[4921]: I1210 12:57:20.761953 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:20 crc kubenswrapper[4921]: I1210 12:57:20.761969 4921 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:20Z","lastTransitionTime":"2025-12-10T12:57:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:20 crc kubenswrapper[4921]: I1210 12:57:20.762617 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50684108-04fc-405c-82be-d21d16cd650b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8fd269a96475df9dccf2f7bd0ffae831f397f49232f5c22df67903b9b8b8161e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a88b1b9101bc4ab339d394df337e4e11ec8af98b44b621bcb84eed1a0fba3d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\
":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://933c0c81aa0aa2d676a6e404f883a7c81240ef7b07a2e794878c85994d0eb88f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f8f888f214898cb28563da7a77267781622df1f2231c27d1fbdee617ada1ec2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27956424405bdf6223a96b8fd91b5152276a1501c3de2e07dfafc8b3329a6063\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access
-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59a0f3962237d723e5aa9044de1ddce3673ae1fb4c9e5e0478cd41daa661f6d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7cae279c3cb664f0de50f9e6f8e88378d8878555863051da2626693c4337cff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bf05f49208bee2d3d43564c1f88b49d4481b48bbb9afe2108c1eea92e4531305\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-10T12:57:19Z\\\",\\\"message\\\":\\\":57:18.800406 6129 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1210 12:57:18.800433 6129 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1210 12:57:18.800441 6129 factory.go:656] Stopping watch factory\\\\nI1210 12:57:18.800465 6129 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1210 12:57:18.800466 6129 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1210 12:57:18.800662 6129 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1210 12:57:18.801063 6129 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1210 12:57:18.801086 6129 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1210 12:57:18.801163 6129 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1210 12:57:18.801631 6129 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1210 12:57:18.801721 6129 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1210 12:57:18.801837 6129 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from 
k8s.io/client-go/informers/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:15Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd0025f5be6e68aba73c349dd732281dead920b7d8c2d307b4a67cfdafb99119\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initConta
inerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34932b230bb26e6c4b1bdf433827ce608df8658f6fb76140a4f0ac680dc1d43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://34932b230bb26e6c4b1bdf433827ce608df8658f6fb76140a4f0ac680dc1d43f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-m7n89\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:20Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:20 crc kubenswrapper[4921]: I1210 12:57:20.782629 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2789e9cd1bca4abecf0939aad4a5f63bdc250a525ad3664bc2440e8b0b7a834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:20Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:20 crc kubenswrapper[4921]: I1210 12:57:20.795280 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zmks6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f2626c5-78df-45d2-8970-c4f99790a0fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d43ebe41a779225842dfa1c4d3be01575113b67ada9be07f553df1514e9dcf85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ft9kj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zmks6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:20Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:20 crc kubenswrapper[4921]: I1210 12:57:20.821030 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pqlx4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://480da3b2621712c4562f9423dc98fdbf17a9dc45365f129777611bc7e934c709\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lhs2m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pqlx4\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:20Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:20 crc kubenswrapper[4921]: I1210 12:57:20.865253 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:20 crc kubenswrapper[4921]: I1210 12:57:20.865308 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:20 crc kubenswrapper[4921]: I1210 12:57:20.865324 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:20 crc kubenswrapper[4921]: I1210 12:57:20.865366 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:20 crc kubenswrapper[4921]: I1210 12:57:20.865379 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:20Z","lastTransitionTime":"2025-12-10T12:57:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:20 crc kubenswrapper[4921]: I1210 12:57:20.969024 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:20 crc kubenswrapper[4921]: I1210 12:57:20.969072 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:20 crc kubenswrapper[4921]: I1210 12:57:20.969084 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:20 crc kubenswrapper[4921]: I1210 12:57:20.969106 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:20 crc kubenswrapper[4921]: I1210 12:57:20.969122 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:20Z","lastTransitionTime":"2025-12-10T12:57:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:21 crc kubenswrapper[4921]: I1210 12:57:21.072116 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:21 crc kubenswrapper[4921]: I1210 12:57:21.072162 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:21 crc kubenswrapper[4921]: I1210 12:57:21.072171 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:21 crc kubenswrapper[4921]: I1210 12:57:21.072192 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:21 crc kubenswrapper[4921]: I1210 12:57:21.072202 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:21Z","lastTransitionTime":"2025-12-10T12:57:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:21 crc kubenswrapper[4921]: I1210 12:57:21.175022 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:21 crc kubenswrapper[4921]: I1210 12:57:21.175090 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:21 crc kubenswrapper[4921]: I1210 12:57:21.175109 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:21 crc kubenswrapper[4921]: I1210 12:57:21.175134 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:21 crc kubenswrapper[4921]: I1210 12:57:21.175151 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:21Z","lastTransitionTime":"2025-12-10T12:57:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:21 crc kubenswrapper[4921]: I1210 12:57:21.278084 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:21 crc kubenswrapper[4921]: I1210 12:57:21.278171 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:21 crc kubenswrapper[4921]: I1210 12:57:21.278197 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:21 crc kubenswrapper[4921]: I1210 12:57:21.278227 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:21 crc kubenswrapper[4921]: I1210 12:57:21.278249 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:21Z","lastTransitionTime":"2025-12-10T12:57:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:21 crc kubenswrapper[4921]: I1210 12:57:21.380602 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:21 crc kubenswrapper[4921]: I1210 12:57:21.380640 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:21 crc kubenswrapper[4921]: I1210 12:57:21.380652 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:21 crc kubenswrapper[4921]: I1210 12:57:21.380669 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:21 crc kubenswrapper[4921]: I1210 12:57:21.380681 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:21Z","lastTransitionTime":"2025-12-10T12:57:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:21 crc kubenswrapper[4921]: I1210 12:57:21.398419 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wwrv2"] Dec 10 12:57:21 crc kubenswrapper[4921]: I1210 12:57:21.398944 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wwrv2" Dec 10 12:57:21 crc kubenswrapper[4921]: I1210 12:57:21.402369 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Dec 10 12:57:21 crc kubenswrapper[4921]: I1210 12:57:21.402646 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Dec 10 12:57:21 crc kubenswrapper[4921]: I1210 12:57:21.418444 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wwrv2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6b3380f-1dd4-45de-9c44-eaa37f965801\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqtvp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqtvp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:21Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wwrv2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:21Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:21 crc kubenswrapper[4921]: I1210 12:57:21.433284 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2789e9cd1bca4abecf0939aad4a5f63bdc250a525ad3664bc2440e8b0b7a834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:21Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:21 crc kubenswrapper[4921]: I1210 12:57:21.434605 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pqtvp\" (UniqueName: \"kubernetes.io/projected/e6b3380f-1dd4-45de-9c44-eaa37f965801-kube-api-access-pqtvp\") pod \"ovnkube-control-plane-749d76644c-wwrv2\" (UID: \"e6b3380f-1dd4-45de-9c44-eaa37f965801\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wwrv2" Dec 10 12:57:21 crc kubenswrapper[4921]: I1210 12:57:21.434882 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/e6b3380f-1dd4-45de-9c44-eaa37f965801-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-wwrv2\" (UID: \"e6b3380f-1dd4-45de-9c44-eaa37f965801\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wwrv2" Dec 10 12:57:21 crc kubenswrapper[4921]: I1210 12:57:21.434995 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/e6b3380f-1dd4-45de-9c44-eaa37f965801-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-wwrv2\" (UID: \"e6b3380f-1dd4-45de-9c44-eaa37f965801\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wwrv2" Dec 10 12:57:21 crc kubenswrapper[4921]: I1210 12:57:21.435024 4921 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/e6b3380f-1dd4-45de-9c44-eaa37f965801-env-overrides\") pod \"ovnkube-control-plane-749d76644c-wwrv2\" (UID: \"e6b3380f-1dd4-45de-9c44-eaa37f965801\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wwrv2" Dec 10 12:57:21 crc kubenswrapper[4921]: I1210 12:57:21.442084 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zmks6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f2626c5-78df-45d2-8970-c4f99790a0fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d43ebe41a779225842dfa1c4d3be01575113b67ada9be07f553df1514e9dcf85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ft9kj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zmks6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:21Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:21 crc kubenswrapper[4921]: I1210 12:57:21.454564 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pqlx4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://480da3b2621712c4562f9423dc98fdbf17a9dc45365f129777611bc7e934c709\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lhs2m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pqlx4\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:21Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:21 crc kubenswrapper[4921]: I1210 12:57:21.473405 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50684108-04fc-405c-82be-d21d16cd650b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8fd269a96475df9dccf2f7bd0ffae831f397f49232f5c22df67903b9b8b8161e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a88b1b9101bc4ab339d394df337e4e11ec8af98b44b621bcb84eed1a0fba3d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://933c0c81aa0aa2d676a6e404f883a7c81240ef7b07a2e794878c85994d0eb88f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f8f888f214898cb28563da7a77267781622df1f2231c27d1fbdee617ada1ec2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27956424405bdf6223a96b8fd91b5152276a1501c3de2e07dfafc8b3329a6063\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59a0f
3962237d723e5aa9044de1ddce3673ae1fb4c9e5e0478cd41daa661f6d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7cae279c3cb664f0de50f9e6f8e88378d8878555863051da2626693c4337cff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bf05f49208bee2d3d43564c1f88b49d4481b48bbb9afe2108c1eea92e4531305\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-10T12:57:19Z\\\",\\\"message\\\":\\\":57:18.800406 6129 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1210 12:57:18.800433 6129 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1210 12:57:18.800441 6129 factory.go:656] Stopping watch factory\\\\nI1210 12:57:18.800465 6129 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1210 12:57:18.800466 6129 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1210 12:57:18.800662 6129 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1210 12:57:18.801063 6129 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1210 12:57:18.801086 6129 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1210 12:57:18.801163 6129 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1210 12:57:18.801631 6129 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1210 12:57:18.801721 6129 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1210 12:57:18.801837 6129 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from 
k8s.io/client-go/informers/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:15Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd0025f5be6e68aba73c349dd732281dead920b7d8c2d307b4a67cfdafb99119\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initConta
inerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34932b230bb26e6c4b1bdf433827ce608df8658f6fb76140a4f0ac680dc1d43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://34932b230bb26e6c4b1bdf433827ce608df8658f6fb76140a4f0ac680dc1d43f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-m7n89\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:21Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:21 crc kubenswrapper[4921]: I1210 12:57:21.483264 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:21 crc kubenswrapper[4921]: I1210 12:57:21.483470 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:21 crc kubenswrapper[4921]: I1210 12:57:21.483582 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:21 crc kubenswrapper[4921]: I1210 12:57:21.483715 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:21 crc kubenswrapper[4921]: I1210 12:57:21.483798 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:21Z","lastTransitionTime":"2025-12-10T12:57:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:21 crc kubenswrapper[4921]: I1210 12:57:21.493643 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-86bpd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"709b4982-f2e6-4692-ab1a-c1d5b7d507ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c842f48ca574d23a086e1b248c17102895f4f45897ac87ddcc1f98f170a22bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34d9e720fab0818e4cdf1e2a4da042a5648c7c396fedf17b395ad07ececd5c9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://34d9e720fab0818e4cdf1e2a4da042a5648c7c396fedf17b395ad07ececd5c9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04723fc3840c9d632dae527a5afa04fc7eea858426056da3dfe8e72186198ab1\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://04723fc3840c9d632dae527a5afa04fc7eea858426056da3dfe8e72186198ab1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://adf25ba213f519cad3c21233c0f3d2a383d978543da8ea1db41bb60dd29f9f3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://adf25ba213f519cad3c21233c0f3d2a383d978543da8ea1db41bb60dd29f9f3e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://baa63608618bc4f059414317df70f14a33321d5aed291adc02a9daac92cf5428\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baa63608618bc4f059414317df70f14a33321d5aed291adc02a9daac92cf5428\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e54a218f367591b87841a39399d5889344b8b92fcc70d77105a0191d3dba37c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e54a218f367591b87841a39399d5889344b8b92fcc70d77105a0191d3dba37c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d90adbff8edcd85eebe4858e412769dff7a05b05bbe7fc533906b55e6ee415e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d90adbff8edcd85eebe4858e412769dff7a05b05bbe7fc533906b55e6ee415e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-86bpd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:21Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:21 crc kubenswrapper[4921]: I1210 12:57:21.513557 4921 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"371fafdc-aa16-4608-aaa2-e419c4ddbc18\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b9a190a657ca03f3fb08626b7af512164ff131b1783b903a02005a111a7036c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57991b0cb6fd4b37082ff5d4eecc6227d77f241e9a983cd3e0eb9db5b485865f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c24d974446ee70bf587bf3969542cda98f062a9cc78b6af73005d9b8d0a6ee02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"container
ID\\\":\\\"cri-o://b5a3f231014293fc0412e577cf9840f62f8db869ea4f0f8bef1bfc5112b38cf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://17a6158acd097054719316d2ad29dc036546d3951bb1e8dd010618f9155270a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://534968b5f5d9e7b3063c91a3e0b68ba04d83e2cb65ab688b23d284adc6852155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://534968b5f5d9e7b3063c91a3e0b68ba04d83e2cb65ab688b23d284adc6852155\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0acb3ca5fa3945c89412f466b00193354c94ce56dbba608c104d3baf555a2c3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0acb3ca5fa3945c89412f466b00193354c94ce56dbba608c104d3baf555a2c3a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b7d1b714ac
f0f278cc0310204225d417266a241f1ea827dc625f7b89a7d0ebac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b7d1b714acf0f278cc0310204225d417266a241f1ea827dc625f7b89a7d0ebac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:21Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:21 crc kubenswrapper[4921]: I1210 12:57:21.530050 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"02726135-3050-46a1-a3ab-b2ce46cdb75d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12292f0529bcf32fb33e5accfbd0dfd7d53e377a9ee2046d4ca6efc78fe1c31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a4716beddbcd24e8418830aa5494cffffc21272e45e30bd15cfe58bfc07c543\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f66fe2144cde40619405c04d7d83cbcc2e78503401df428502abad1682d4cb7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4244835c7f038a7c1bf4820de49854350a23fac13c5a252a1553f6508594f10e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:21Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:21 crc kubenswrapper[4921]: I1210 12:57:21.536202 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/e6b3380f-1dd4-45de-9c44-eaa37f965801-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-wwrv2\" (UID: \"e6b3380f-1dd4-45de-9c44-eaa37f965801\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wwrv2" Dec 10 12:57:21 crc kubenswrapper[4921]: I1210 12:57:21.536246 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/e6b3380f-1dd4-45de-9c44-eaa37f965801-env-overrides\") pod \"ovnkube-control-plane-749d76644c-wwrv2\" (UID: \"e6b3380f-1dd4-45de-9c44-eaa37f965801\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wwrv2" Dec 10 12:57:21 crc kubenswrapper[4921]: I1210 12:57:21.536277 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pqtvp\" (UniqueName: \"kubernetes.io/projected/e6b3380f-1dd4-45de-9c44-eaa37f965801-kube-api-access-pqtvp\") pod \"ovnkube-control-plane-749d76644c-wwrv2\" (UID: \"e6b3380f-1dd4-45de-9c44-eaa37f965801\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wwrv2" Dec 10 12:57:21 crc kubenswrapper[4921]: I1210 12:57:21.536333 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/e6b3380f-1dd4-45de-9c44-eaa37f965801-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-wwrv2\" (UID: \"e6b3380f-1dd4-45de-9c44-eaa37f965801\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wwrv2" Dec 10 12:57:21 crc kubenswrapper[4921]: I1210 12:57:21.537358 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/e6b3380f-1dd4-45de-9c44-eaa37f965801-env-overrides\") pod \"ovnkube-control-plane-749d76644c-wwrv2\" (UID: \"e6b3380f-1dd4-45de-9c44-eaa37f965801\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wwrv2" Dec 10 12:57:21 crc kubenswrapper[4921]: I1210 
12:57:21.537899 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/e6b3380f-1dd4-45de-9c44-eaa37f965801-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-wwrv2\" (UID: \"e6b3380f-1dd4-45de-9c44-eaa37f965801\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wwrv2" Dec 10 12:57:21 crc kubenswrapper[4921]: I1210 12:57:21.544829 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/e6b3380f-1dd4-45de-9c44-eaa37f965801-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-wwrv2\" (UID: \"e6b3380f-1dd4-45de-9c44-eaa37f965801\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wwrv2" Dec 10 12:57:21 crc kubenswrapper[4921]: I1210 12:57:21.548907 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:21Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:21 crc kubenswrapper[4921]: I1210 12:57:21.555449 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pqtvp\" (UniqueName: \"kubernetes.io/projected/e6b3380f-1dd4-45de-9c44-eaa37f965801-kube-api-access-pqtvp\") pod \"ovnkube-control-plane-749d76644c-wwrv2\" (UID: \"e6b3380f-1dd4-45de-9c44-eaa37f965801\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wwrv2" Dec 10 12:57:21 crc kubenswrapper[4921]: I1210 12:57:21.566076 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://307b845aae3352df08e2f9fd394f4110a37b2a21650593ebb584c5bf37d01397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3be8a498516e12174c8b5612669fd69deef610c01ed9884a5228cd436bbae3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd4
7ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:21Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:21 crc kubenswrapper[4921]: I1210 12:57:21.582491 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f57208b0-80bc-4c1b-bbab-9d2f858972f6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0534394a39803e8a7555e29d0770b5ac7f9197a5f0e03bec4c5460d77fffdd14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6eaca0cb438e61f0856ed7dc64256ccd02aee8dac014d1f5e9cd8aa180c736fb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-re
generation-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://692a4c4828dc74b1bfb948f58fab96ee6674030cb9009c72f30f9eae482eb682\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f534d6390920d177e185001b28f7ece42d82a0da922b4aaf174c271dbe975c50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b39874b20cdccc7903753342421a1f7e13b7e99a2cb699a7c0e44226aebd4f4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"message\\\":\\\"et denominator for mutating requests\\\\\\\" limit=200\\\\nI1210 12:57:01.294872 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1210 12:57:01.294893 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1210 12:57:01.294918 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 12:57:01.294926 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 12:57:01.294932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1210 12:57:01.294934 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1210 12:57:01.294938 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1210 12:57:01.294941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1210 12:57:01.301734 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2446357718/tls.crt::/tmp/serving-cert-2446357718/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1765371405\\\\\\\\\\\\\\\" (2025-12-10 12:56:44 +0000 UTC to 2026-01-09 12:56:45 +0000 UTC (now=2025-12-10 12:57:01.30169166 +0000 UTC))\\\\\\\"\\\\nI1210 12:57:01.301889 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1765371416\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] 
issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1765371416\\\\\\\\\\\\\\\" (2025-12-10 11:56:55 +0000 UTC to 2026-12-10 11:56:55 +0000 UTC (now=2025-12-10 12:57:01.301865574 +0000 UTC))\\\\\\\"\\\\nI1210 12:57:01.301907 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1210 12:57:01.301934 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nF1210 12:57:01.302850 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e5afbcb1ea81c3f9ec4152ef614a3f07ba1ded75c774c467e968f9c3ee72e33\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bee74fc4c681cc10c5a460c807659272e393e19173109e82ef65371c5b363ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bee74fc4c681cc10c5a460c807659272e393e19173109e82ef65371c5b363ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:21Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:21 crc kubenswrapper[4921]: I1210 12:57:21.586908 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:21 crc kubenswrapper[4921]: I1210 12:57:21.586936 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:21 crc 
kubenswrapper[4921]: I1210 12:57:21.586947 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:21 crc kubenswrapper[4921]: I1210 12:57:21.586968 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:21 crc kubenswrapper[4921]: I1210 12:57:21.586979 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:21Z","lastTransitionTime":"2025-12-10T12:57:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:21 crc kubenswrapper[4921]: I1210 12:57:21.596636 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:21Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:21 crc kubenswrapper[4921]: I1210 12:57:21.610786 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:21Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:21 crc kubenswrapper[4921]: I1210 12:57:21.621673 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"354355f7-6630-49a8-bdc5-5e875feecb7f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22c45fd7d4d0bb91e995e76a0d813660f9b488a4765e3a21eab2485e1ff03ff3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dbm9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27975eaa70887a1e6ec3bc21ce170bbe5dfe5a05172264be8c8bd343aea02998\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dbm9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-vn2n6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:21Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:21 crc kubenswrapper[4921]: I1210 12:57:21.634112 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bd245e67c99943297f64701eba8772143dc206caf67849eaf2f9a8e82dab0d26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:21Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:21 crc kubenswrapper[4921]: I1210 12:57:21.645125 4921 
status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jskgz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ad003cc-9fcc-4fc6-86b9-247b30013c0a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8c0dc3ea5672198c430f12ce59b7f2a66100fe52e0f7b4552deba97144250d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m875h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:11Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jskgz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:21Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:21 crc kubenswrapper[4921]: I1210 12:57:21.689198 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:21 crc kubenswrapper[4921]: I1210 12:57:21.689688 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:21 crc kubenswrapper[4921]: I1210 12:57:21.689836 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:21 crc kubenswrapper[4921]: I1210 12:57:21.690009 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:21 crc kubenswrapper[4921]: I1210 12:57:21.690146 4921 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:21Z","lastTransitionTime":"2025-12-10T12:57:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:21 crc kubenswrapper[4921]: I1210 12:57:21.711554 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wwrv2" Dec 10 12:57:21 crc kubenswrapper[4921]: W1210 12:57:21.731590 4921 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode6b3380f_1dd4_45de_9c44_eaa37f965801.slice/crio-b67a88513df3169d40b056381101860c54c938774540bc80ec9911a9131e3487 WatchSource:0}: Error finding container b67a88513df3169d40b056381101860c54c938774540bc80ec9911a9131e3487: Status 404 returned error can't find the container with id b67a88513df3169d40b056381101860c54c938774540bc80ec9911a9131e3487 Dec 10 12:57:21 crc kubenswrapper[4921]: I1210 12:57:21.793221 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:21 crc kubenswrapper[4921]: I1210 12:57:21.793275 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:21 crc kubenswrapper[4921]: I1210 12:57:21.793288 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:21 crc kubenswrapper[4921]: I1210 12:57:21.793308 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:21 crc kubenswrapper[4921]: I1210 12:57:21.793319 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:21Z","lastTransitionTime":"2025-12-10T12:57:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:21 crc kubenswrapper[4921]: I1210 12:57:21.895687 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:21 crc kubenswrapper[4921]: I1210 12:57:21.895740 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:21 crc kubenswrapper[4921]: I1210 12:57:21.895753 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:21 crc kubenswrapper[4921]: I1210 12:57:21.895773 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:21 crc kubenswrapper[4921]: I1210 12:57:21.895786 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:21Z","lastTransitionTime":"2025-12-10T12:57:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:21 crc kubenswrapper[4921]: I1210 12:57:21.998124 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:21 crc kubenswrapper[4921]: I1210 12:57:21.998167 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:21 crc kubenswrapper[4921]: I1210 12:57:21.998179 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:21 crc kubenswrapper[4921]: I1210 12:57:21.998198 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:21 crc kubenswrapper[4921]: I1210 12:57:21.998214 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:21Z","lastTransitionTime":"2025-12-10T12:57:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:22 crc kubenswrapper[4921]: I1210 12:57:22.101696 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:22 crc kubenswrapper[4921]: I1210 12:57:22.101736 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:22 crc kubenswrapper[4921]: I1210 12:57:22.101747 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:22 crc kubenswrapper[4921]: I1210 12:57:22.101764 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:22 crc kubenswrapper[4921]: I1210 12:57:22.101778 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:22Z","lastTransitionTime":"2025-12-10T12:57:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:22 crc kubenswrapper[4921]: I1210 12:57:22.192320 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 12:57:22 crc kubenswrapper[4921]: I1210 12:57:22.192318 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 12:57:22 crc kubenswrapper[4921]: E1210 12:57:22.192552 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 12:57:22 crc kubenswrapper[4921]: E1210 12:57:22.193045 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 12:57:22 crc kubenswrapper[4921]: I1210 12:57:22.193084 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 12:57:22 crc kubenswrapper[4921]: E1210 12:57:22.193243 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 12:57:22 crc kubenswrapper[4921]: I1210 12:57:22.206114 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:22 crc kubenswrapper[4921]: I1210 12:57:22.206162 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:22 crc kubenswrapper[4921]: I1210 12:57:22.206174 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:22 crc kubenswrapper[4921]: I1210 12:57:22.206191 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:22 crc kubenswrapper[4921]: I1210 12:57:22.206202 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:22Z","lastTransitionTime":"2025-12-10T12:57:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:22 crc kubenswrapper[4921]: I1210 12:57:22.309177 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:22 crc kubenswrapper[4921]: I1210 12:57:22.309247 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:22 crc kubenswrapper[4921]: I1210 12:57:22.309264 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:22 crc kubenswrapper[4921]: I1210 12:57:22.309290 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:22 crc kubenswrapper[4921]: I1210 12:57:22.309309 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:22Z","lastTransitionTime":"2025-12-10T12:57:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:22 crc kubenswrapper[4921]: I1210 12:57:22.411921 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:22 crc kubenswrapper[4921]: I1210 12:57:22.411957 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:22 crc kubenswrapper[4921]: I1210 12:57:22.411970 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:22 crc kubenswrapper[4921]: I1210 12:57:22.411992 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:22 crc kubenswrapper[4921]: I1210 12:57:22.412008 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:22Z","lastTransitionTime":"2025-12-10T12:57:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:22 crc kubenswrapper[4921]: I1210 12:57:22.515813 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-j2nnf"] Dec 10 12:57:22 crc kubenswrapper[4921]: I1210 12:57:22.515918 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:22 crc kubenswrapper[4921]: I1210 12:57:22.516008 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:22 crc kubenswrapper[4921]: I1210 12:57:22.516033 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:22 crc kubenswrapper[4921]: I1210 12:57:22.516065 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:22 crc kubenswrapper[4921]: I1210 12:57:22.516090 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:22Z","lastTransitionTime":"2025-12-10T12:57:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:22 crc kubenswrapper[4921]: I1210 12:57:22.516494 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j2nnf" Dec 10 12:57:22 crc kubenswrapper[4921]: E1210 12:57:22.516559 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-j2nnf" podUID="9cc656f0-ce36-474b-9fa3-1ce9f43675a4" Dec 10 12:57:22 crc kubenswrapper[4921]: I1210 12:57:22.528882 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wwrv2" event={"ID":"e6b3380f-1dd4-45de-9c44-eaa37f965801","Type":"ContainerStarted","Data":"b67a88513df3169d40b056381101860c54c938774540bc80ec9911a9131e3487"} Dec 10 12:57:22 crc kubenswrapper[4921]: I1210 12:57:22.546074 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pqlx4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://480da3b2621712c4562f9423dc98fdbf17a9dc45365f129777611bc7e934c709\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\
\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lhs2m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pqlx4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:22Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:22 crc kubenswrapper[4921]: I1210 12:57:22.549611 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9cc656f0-ce36-474b-9fa3-1ce9f43675a4-metrics-certs\") pod \"network-metrics-daemon-j2nnf\" (UID: \"9cc656f0-ce36-474b-9fa3-1ce9f43675a4\") " pod="openshift-multus/network-metrics-daemon-j2nnf" Dec 10 12:57:22 crc kubenswrapper[4921]: I1210 12:57:22.549692 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b9vnm\" (UniqueName: \"kubernetes.io/projected/9cc656f0-ce36-474b-9fa3-1ce9f43675a4-kube-api-access-b9vnm\") pod \"network-metrics-daemon-j2nnf\" (UID: \"9cc656f0-ce36-474b-9fa3-1ce9f43675a4\") " pod="openshift-multus/network-metrics-daemon-j2nnf" Dec 10 12:57:22 crc kubenswrapper[4921]: I1210 12:57:22.579932 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50684108-04fc-405c-82be-d21d16cd650b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8fd269a96475df9dccf2f7bd0ffae831f397f49232f5c22df67903b9b8b8161e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a88b1b9101bc4ab339d394df337e4e11ec8af98b44b621bcb84eed1a0fba3d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://933c0c81aa0aa2d676a6e404f883a7c81240ef7b07a2e794878c85994d0eb88f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f8f888f214898cb28563da7a77267781622df1f2231c27d1fbdee617ada1ec2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27956424405bdf6223a96b8fd91b5152276a1501c3de2e07dfafc8b3329a6063\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59a0f3962237d723e5aa9044de1ddce3673ae1fb4c9e5e0478cd41daa661f6d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7cae279c3cb664f0de50f9e6f8e88378d887855
5863051da2626693c4337cff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bf05f49208bee2d3d43564c1f88b49d4481b48bbb9afe2108c1eea92e4531305\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-10T12:57:19Z\\\",\\\"message\\\":\\\":57:18.800406 6129 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1210 12:57:18.800433 6129 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1210 12:57:18.800441 6129 factory.go:656] Stopping watch factory\\\\nI1210 12:57:18.800465 6129 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1210 12:57:18.800466 6129 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1210 12:57:18.800662 6129 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1210 12:57:18.801063 6129 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1210 12:57:18.801086 6129 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1210 12:57:18.801163 6129 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1210 12:57:18.801631 6129 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1210 12:57:18.801721 6129 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1210 12:57:18.801837 6129 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from 
k8s.io/client-go/informers/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:15Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd0025f5be6e68aba73c349dd732281dead920b7d8c2d307b4a67cfdafb99119\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initConta
inerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34932b230bb26e6c4b1bdf433827ce608df8658f6fb76140a4f0ac680dc1d43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://34932b230bb26e6c4b1bdf433827ce608df8658f6fb76140a4f0ac680dc1d43f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-m7n89\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:22Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:22 crc kubenswrapper[4921]: I1210 12:57:22.608318 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wwrv2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6b3380f-1dd4-45de-9c44-eaa37f965801\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqtvp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqtvp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:21Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wwrv2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:22Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:22 crc kubenswrapper[4921]: I1210 12:57:22.620239 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:22 crc kubenswrapper[4921]: I1210 12:57:22.620313 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:22 crc kubenswrapper[4921]: I1210 12:57:22.620339 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:22 crc kubenswrapper[4921]: I1210 12:57:22.620374 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:22 crc kubenswrapper[4921]: I1210 12:57:22.620445 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:22Z","lastTransitionTime":"2025-12-10T12:57:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:22 crc kubenswrapper[4921]: I1210 12:57:22.623912 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-j2nnf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9cc656f0-ce36-474b-9fa3-1ce9f43675a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9vnm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9vnm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:22Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-j2nnf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:22Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:22 crc kubenswrapper[4921]: I1210 12:57:22.642963 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2789e9cd1bca4abecf0939aad4a5f63bdc250a525ad3664bc2440e8b0b7a834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:22Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:22 crc kubenswrapper[4921]: I1210 12:57:22.651069 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9cc656f0-ce36-474b-9fa3-1ce9f43675a4-metrics-certs\") pod \"network-metrics-daemon-j2nnf\" (UID: \"9cc656f0-ce36-474b-9fa3-1ce9f43675a4\") " pod="openshift-multus/network-metrics-daemon-j2nnf" Dec 10 12:57:22 crc kubenswrapper[4921]: I1210 12:57:22.651161 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b9vnm\" (UniqueName: \"kubernetes.io/projected/9cc656f0-ce36-474b-9fa3-1ce9f43675a4-kube-api-access-b9vnm\") pod \"network-metrics-daemon-j2nnf\" (UID: \"9cc656f0-ce36-474b-9fa3-1ce9f43675a4\") " pod="openshift-multus/network-metrics-daemon-j2nnf" Dec 10 12:57:22 crc kubenswrapper[4921]: E1210 12:57:22.651297 4921 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 10 12:57:22 crc kubenswrapper[4921]: E1210 12:57:22.651413 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9cc656f0-ce36-474b-9fa3-1ce9f43675a4-metrics-certs podName:9cc656f0-ce36-474b-9fa3-1ce9f43675a4 nodeName:}" failed. No retries permitted until 2025-12-10 12:57:23.151365541 +0000 UTC m=+40.367587475 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/9cc656f0-ce36-474b-9fa3-1ce9f43675a4-metrics-certs") pod "network-metrics-daemon-j2nnf" (UID: "9cc656f0-ce36-474b-9fa3-1ce9f43675a4") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 10 12:57:22 crc kubenswrapper[4921]: I1210 12:57:22.662118 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zmks6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f2626c5-78df-45d2-8970-c4f99790a0fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d43ebe41a779225842dfa1c4d3be01575113b67ada9be07f553df1514e9dcf85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ft9kj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zmks6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:22Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:22 crc kubenswrapper[4921]: I1210 12:57:22.677846 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:22Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:22 crc kubenswrapper[4921]: I1210 12:57:22.681603 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b9vnm\" (UniqueName: \"kubernetes.io/projected/9cc656f0-ce36-474b-9fa3-1ce9f43675a4-kube-api-access-b9vnm\") pod \"network-metrics-daemon-j2nnf\" (UID: \"9cc656f0-ce36-474b-9fa3-1ce9f43675a4\") " pod="openshift-multus/network-metrics-daemon-j2nnf" Dec 10 12:57:22 crc kubenswrapper[4921]: I1210 12:57:22.698099 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://307b845aae3352df08e2f9fd394f4110a37b2a21650593ebb584c5bf37d01397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3be8a498516e12174c8b5612669fd69deef610c01ed9884a5228cd436bbae3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:22Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:22 crc kubenswrapper[4921]: I1210 12:57:22.717611 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-86bpd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"709b4982-f2e6-4692-ab1a-c1d5b7d507ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c842f48ca574d23a086e1b248c17102895f4f45897ac87ddcc1f98f170a22bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34d9e720fab0818e4cdf1e2a4da042a5648c7c396fedf17b395ad07ececd5c9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://34d9e720fab0818e4cdf1e2a4da042a5648c7c396fedf17b395ad07ececd5c9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04723fc3840c9d632dae527a5afa04fc7eea858426056da3dfe8e72186198ab1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://04723fc3840c9d632dae527a5afa04fc7eea858426056da3dfe8e72186198ab1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://adf25ba213f519cad3c21233c0f3d2a383d978543da8ea1db41bb60dd29f9f3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://adf25ba213f519cad3c21233c0f3d2a383d978543da8ea1db41bb60dd29f9f3e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://baa63608618bc4f059414317df70f14a33321d5aed291adc02a9daac92cf5428\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baa63608618bc4f059414317df70f14a33321d5aed291adc02a9daac92cf5428\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e54a218f367591b87841a39399d5889344b8b92fcc70d77105a0191d3dba37c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e54a218f367591b87841a39399d5889344b8b92fcc70d77105a0191d3dba37c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d90adbff8edcd85eebe4858e412769dff7a05b05bbe7fc533906b55e6ee415e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d90adbff8edcd85eebe4858e412769dff7a05b05bbe7fc533906b55e6ee415e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-86bpd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:22Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:22 crc kubenswrapper[4921]: I1210 12:57:22.723354 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:22 crc kubenswrapper[4921]: I1210 12:57:22.723420 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:22 crc 
kubenswrapper[4921]: I1210 12:57:22.723436 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:22 crc kubenswrapper[4921]: I1210 12:57:22.723490 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:22 crc kubenswrapper[4921]: I1210 12:57:22.723502 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:22Z","lastTransitionTime":"2025-12-10T12:57:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:22 crc kubenswrapper[4921]: I1210 12:57:22.744510 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"371fafdc-aa16-4608-aaa2-e419c4ddbc18\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b9a190a657ca03f3fb08626b7af512164ff131b1783b903a02005a111a7036c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57991b0cb6fd4b37082ff5d4eecc6227d77f241e9a983cd3e0eb9db5b485865f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mou
ntPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c24d974446ee70bf587bf3969542cda98f062a9cc78b6af73005d9b8d0a6ee02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5a3f231014293fc0412e577cf9840f62f8db869ea4f0f8bef1bfc5112b38cf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://17a6158acd097054719316d2ad29dc036546d3951bb1e8dd010618f9155270a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://534968b5f5d9e7b3063c91a3e0b68ba04d83e2cb65ab688b23d284adc6852155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://534968b5f5d9e7b3063c91a3e0b68ba04d83e2cb65ab688b23d284adc6852155\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\
\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0acb3ca5fa3945c89412f466b00193354c94ce56dbba608c104d3baf555a2c3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0acb3ca5fa3945c89412f466b00193354c94ce56dbba608c104d3baf555a2c3a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b7d1b714acf0f278cc0310204225d417266a241f1ea827dc625f7b89a7d0ebac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b7d1b714acf0f278cc0310204225d417266a241f1ea827dc625f7b89a7d0ebac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:22Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:22 crc kubenswrapper[4921]: I1210 12:57:22.763001 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"02726135-3050-46a1-a3ab-b2ce46cdb75d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12292f0529bcf32fb33e5accfbd0dfd7d53e377a9ee2046d4ca6efc78fe1c31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a4716beddbcd24e8418830aa5494cffffc21272e45e30bd15cfe58bfc07c543\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f66fe2144cde40619405c04d7d83cbcc2e78503401df428502abad1682d4cb7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4244835c7f038a7c1bf4820de49854350a23fac13c5a252a1553f6508594f10e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:22Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:22 crc kubenswrapper[4921]: I1210 12:57:22.775030 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"354355f7-6630-49a8-bdc5-5e875feecb7f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22c45fd7d4d0bb91e995e76a0d813660f9b488a4765e3a21eab2485e1ff03ff3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dbm9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27975eaa70887a1e6e
c3bc21ce170bbe5dfe5a05172264be8c8bd343aea02998\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dbm9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-vn2n6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:22Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:22 crc kubenswrapper[4921]: I1210 12:57:22.794373 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f57208b0-80bc-4c1b-bbab-9d2f858972f6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0534394a39803e8a7555e29d0770b5ac7f9197a5f0e03bec4c5460d77fffdd14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6eaca0cb438e61f0856ed7dc64256ccd02aee8dac014d1f5e9cd8aa180c736fb\\\",\\\"imag
e\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://692a4c4828dc74b1bfb948f58fab96ee6674030cb9009c72f30f9eae482eb682\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f534d6390920d177e185001b28f7ece42d82a0da922b4aaf174c271dbe975c50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b39874b20cdccc7903753342421a1f7e13b7e99a2cb699a7c0e44226aebd4f4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"message\\\":\\\"et denominator for mutating requests\\\\\\\" limit=200\\\\nI1210 12:57:01.294872 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1210 12:57:01.294893 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1210 12:57:01.294918 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 12:57:01.294926 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 12:57:01.294932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1210 12:57:01.294934 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1210 12:57:01.294938 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1210 12:57:01.294941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1210 12:57:01.301734 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2446357718/tls.crt::/tmp/serving-cert-2446357718/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1765371405\\\\\\\\\\\\\\\" (2025-12-10 12:56:44 +0000 UTC to 2026-01-09 12:56:45 
+0000 UTC (now=2025-12-10 12:57:01.30169166 +0000 UTC))\\\\\\\"\\\\nI1210 12:57:01.301889 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1765371416\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1765371416\\\\\\\\\\\\\\\" (2025-12-10 11:56:55 +0000 UTC to 2026-12-10 11:56:55 +0000 UTC (now=2025-12-10 12:57:01.301865574 +0000 UTC))\\\\\\\"\\\\nI1210 12:57:01.301907 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1210 12:57:01.301934 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nF1210 12:57:01.302850 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e5afbcb1ea81c3f9ec4152ef614a3f07ba1ded75c774c467e968f9c3ee72e33\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bee74fc4c681cc10c5a460c807659272e393e19173109e82ef65371c5b363ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bee74fc4c681cc10c5a460c807659272e393e19173109e82ef65371c5b363ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:22Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:22 crc 
kubenswrapper[4921]: I1210 12:57:22.809604 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:22Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:22 crc kubenswrapper[4921]: I1210 12:57:22.824187 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:22Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:22 crc kubenswrapper[4921]: I1210 12:57:22.826042 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:22 crc kubenswrapper[4921]: I1210 12:57:22.826082 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:22 crc kubenswrapper[4921]: I1210 12:57:22.826091 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:22 crc kubenswrapper[4921]: I1210 12:57:22.826106 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:22 crc kubenswrapper[4921]: I1210 12:57:22.826117 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:22Z","lastTransitionTime":"2025-12-10T12:57:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:22 crc kubenswrapper[4921]: I1210 12:57:22.836507 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bd245e67c99943297f64701eba8772143dc206caf67849eaf2f9a8e82dab0d26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:22Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:22 crc kubenswrapper[4921]: I1210 12:57:22.846518 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jskgz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ad003cc-9fcc-4fc6-86b9-247b30013c0a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8c0dc3ea5672198c430f12ce59b7f2a66100fe52e0f7b4552deba97144250d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m875h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:11Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jskgz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:22Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:22 crc kubenswrapper[4921]: I1210 12:57:22.929261 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:22 crc kubenswrapper[4921]: I1210 12:57:22.929322 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:22 crc kubenswrapper[4921]: I1210 12:57:22.929340 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:22 crc kubenswrapper[4921]: I1210 12:57:22.929367 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:22 crc kubenswrapper[4921]: I1210 12:57:22.929385 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:22Z","lastTransitionTime":"2025-12-10T12:57:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:23 crc kubenswrapper[4921]: I1210 12:57:23.032978 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:23 crc kubenswrapper[4921]: I1210 12:57:23.033023 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:23 crc kubenswrapper[4921]: I1210 12:57:23.033039 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:23 crc kubenswrapper[4921]: I1210 12:57:23.033062 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:23 crc kubenswrapper[4921]: I1210 12:57:23.033075 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:23Z","lastTransitionTime":"2025-12-10T12:57:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:23 crc kubenswrapper[4921]: I1210 12:57:23.136025 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:23 crc kubenswrapper[4921]: I1210 12:57:23.136069 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:23 crc kubenswrapper[4921]: I1210 12:57:23.136078 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:23 crc kubenswrapper[4921]: I1210 12:57:23.136095 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:23 crc kubenswrapper[4921]: I1210 12:57:23.136107 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:23Z","lastTransitionTime":"2025-12-10T12:57:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:23 crc kubenswrapper[4921]: I1210 12:57:23.156555 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9cc656f0-ce36-474b-9fa3-1ce9f43675a4-metrics-certs\") pod \"network-metrics-daemon-j2nnf\" (UID: \"9cc656f0-ce36-474b-9fa3-1ce9f43675a4\") " pod="openshift-multus/network-metrics-daemon-j2nnf" Dec 10 12:57:23 crc kubenswrapper[4921]: E1210 12:57:23.156769 4921 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 10 12:57:23 crc kubenswrapper[4921]: E1210 12:57:23.156861 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9cc656f0-ce36-474b-9fa3-1ce9f43675a4-metrics-certs podName:9cc656f0-ce36-474b-9fa3-1ce9f43675a4 nodeName:}" failed. No retries permitted until 2025-12-10 12:57:24.156839985 +0000 UTC m=+41.373061909 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/9cc656f0-ce36-474b-9fa3-1ce9f43675a4-metrics-certs") pod "network-metrics-daemon-j2nnf" (UID: "9cc656f0-ce36-474b-9fa3-1ce9f43675a4") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 10 12:57:23 crc kubenswrapper[4921]: I1210 12:57:23.231258 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50684108-04fc-405c-82be-d21d16cd650b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8fd269a96475df9dccf2f7bd0ffae831f397f49232f5c22df67903b9b8b8161e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a88b1b9101bc4ab339d394df337e4e11ec8af98b44b621bcb84eed1a0fba3d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnl
y\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://933c0c81aa0aa2d676a6e404f883a7c81240ef7b07a2e794878c85994d0eb88f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f8f888f214898cb28563da7a77267781622df1f2231c27d1fbdee617ada1ec2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27956424405bdf6223a96b8fd91b5152276a1501c3de2e07dfafc8b3329a6063\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"contai
nerID\\\":\\\"cri-o://59a0f3962237d723e5aa9044de1ddce3673ae1fb4c9e5e0478cd41daa661f6d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7cae279c3cb664f0de50f9e6f8e88378d8878555863051da2626693c4337cff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bf05f49208bee2d3d43564c1f88b49d4481b48bbb9afe2108c1eea92e4531305\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-10T12:57:19Z\\\",\\\"message\\\":\\\":57:18.800406 6129 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1210 12:57:18.800433 6129 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1210 12:57:18.800441 6129 factory.go:656] Stopping watch factory\\\\nI1210 12:57:18.800465 6129 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1210 12:57:18.800466 6129 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1210 12:57:18.800662 6129 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1210 12:57:18.801063 6129 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1210 12:57:18.801086 6129 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1210 12:57:18.801163 6129 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1210 12:57:18.801631 6129 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1210 12:57:18.801721 6129 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1210 12:57:18.801837 6129 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from 
k8s.io/client-go/informers/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:15Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd0025f5be6e68aba73c349dd732281dead920b7d8c2d307b4a67cfdafb99119\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initConta
inerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34932b230bb26e6c4b1bdf433827ce608df8658f6fb76140a4f0ac680dc1d43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://34932b230bb26e6c4b1bdf433827ce608df8658f6fb76140a4f0ac680dc1d43f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-m7n89\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:23Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:23 crc kubenswrapper[4921]: I1210 12:57:23.240497 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:23 crc kubenswrapper[4921]: I1210 12:57:23.240546 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:23 crc kubenswrapper[4921]: I1210 12:57:23.240560 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:23 crc kubenswrapper[4921]: I1210 12:57:23.240580 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:23 crc kubenswrapper[4921]: I1210 12:57:23.240594 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:23Z","lastTransitionTime":"2025-12-10T12:57:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:23 crc kubenswrapper[4921]: I1210 12:57:23.252440 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wwrv2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6b3380f-1dd4-45de-9c44-eaa37f965801\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqtvp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqtvp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:21Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wwrv2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: 
current time 2025-12-10T12:57:23Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:23 crc kubenswrapper[4921]: I1210 12:57:23.264716 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-j2nnf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9cc656f0-ce36-474b-9fa3-1ce9f43675a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9vnm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9vnm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:22Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-j2nnf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:23Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:23 crc kubenswrapper[4921]: I1210 12:57:23.280923 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2789e9cd1bca4abecf0939aad4a5f63bdc250a525ad3664bc2440e8b0b7a834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:23Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:23 crc kubenswrapper[4921]: I1210 12:57:23.300256 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zmks6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f2626c5-78df-45d2-8970-c4f99790a0fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d43ebe41a779225842dfa1c4d3be01575113b67ada9be07f553df1514e9dcf85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ft9kj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zmks6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:23Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:23 crc kubenswrapper[4921]: I1210 12:57:23.316998 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pqlx4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://480da3b2621712c4562f9423dc98fdbf17a9dc45365f129777611bc7e934c709\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lhs2m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pqlx4\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:23Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:23 crc kubenswrapper[4921]: I1210 12:57:23.334715 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://307b845aae3352df08e2f9fd394f4110a37b2a21650593ebb584c5bf37d01397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3be8a498516e12174c8b5612669fd69deef610c01ed9884a5228cd436bbae3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:23Z is after 
2025-08-24T17:21:41Z" Dec 10 12:57:23 crc kubenswrapper[4921]: I1210 12:57:23.343642 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:23 crc kubenswrapper[4921]: I1210 12:57:23.343719 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:23 crc kubenswrapper[4921]: I1210 12:57:23.343732 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:23 crc kubenswrapper[4921]: I1210 12:57:23.343755 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:23 crc kubenswrapper[4921]: I1210 12:57:23.343775 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:23Z","lastTransitionTime":"2025-12-10T12:57:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:23 crc kubenswrapper[4921]: I1210 12:57:23.353095 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-86bpd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"709b4982-f2e6-4692-ab1a-c1d5b7d507ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c842f48ca574d23a086e1b248c17102895f4f45897ac87ddcc1f98f170a22bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34d9e720fab0818e4cdf1e2a4da042a5648c7c396fedf17b395ad07ececd5c9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db77
08c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://34d9e720fab0818e4cdf1e2a4da042a5648c7c396fedf17b395ad07ececd5c9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04723fc3840c9d632dae527a5afa04fc7eea858426056da3dfe8e72186198ab1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://04723fc3840c9d632dae527a5afa04fc7eea858426056da3dfe8e72186198ab1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://adf25ba213f519cad3c21233c0f3d2a383d978543da8ea1db41bb60dd29f9f3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://adf25ba213f519cad3c21233c0f3d2a383d978543da8ea1db41bb60dd29f9f3e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\"
:\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://baa63608618bc4f059414317df70f14a33321d5aed291adc02a9daac92cf5428\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baa63608618bc4f059414317df70f14a33321d5aed291adc02a9daac92cf5428\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e54a218f367591b87841a39399d5889344b8b92fcc70d77105a0191d3dba37c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e54a218f367591b87841a39399d5889344b8b92fcc70d77105a0191d3dba37c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d90adbff8edcd85eebe4858e412769dff7a05b05bbe7fc533906b55e6ee415e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d90adbff8edcd85eebe4858e412769dff7a05b05bbe7fc533906b55e6ee415e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\
\\"2025-12-10T12:57:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-86bpd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:23Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:23 crc kubenswrapper[4921]: I1210 12:57:23.377432 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"371fafdc-aa16-4608-aaa2-e419c4ddbc18\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b9a190a657ca03f3fb08626b7af512164ff131b1783b903a02005a111a7036c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57991b0cb6fd4b37082ff5d4eecc6227d77f241e9a983cd3e0eb9db5b485865f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":t
rue,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c24d974446ee70bf587bf3969542cda98f062a9cc78b6af73005d9b8d0a6ee02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5a3f231014293fc0412e577cf9840f62f8db869ea4f0f8bef1bfc5112b38cf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://17a6158acd097054719316d2ad29dc036546d3951bb1e8dd010618f9155270a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://534968b5f5d9e7b3063c91a3e0b68ba04d83e2cb65ab688b23d284adc6852155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o:
//534968b5f5d9e7b3063c91a3e0b68ba04d83e2cb65ab688b23d284adc6852155\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0acb3ca5fa3945c89412f466b00193354c94ce56dbba608c104d3baf555a2c3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0acb3ca5fa3945c89412f466b00193354c94ce56dbba608c104d3baf555a2c3a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b7d1b714acf0f278cc0310204225d417266a241f1ea827dc625f7b89a7d0ebac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b7d1b714acf0f278cc0310204225d417266a241f1ea827dc625f7b89a7d0ebac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:23Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:23 crc kubenswrapper[4921]: I1210 12:57:23.396533 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"02726135-3050-46a1-a3ab-b2ce46cdb75d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12292f0529bcf32fb33e5accfbd0dfd7d53e377a9ee2046d4ca6efc78fe1c31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a4716beddbcd24e8418830aa5494cffffc21272e45e30bd15cfe58bfc07c543\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f66fe2144cde40619405c04d7d83cbcc2e78503401df428502abad1682d4cb7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4244835c7f038a7c1bf4820de49854350a23fac13c5a252a1553f6508594f10e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:23Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:23 crc kubenswrapper[4921]: I1210 12:57:23.412653 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:23Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:23 crc kubenswrapper[4921]: I1210 12:57:23.435707 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f57208b0-80bc-4c1b-bbab-9d2f858972f6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0534394a39803e8a7555e29d0770b5ac7f9197a5f0e03bec4c5460d77fffdd14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6eaca0cb438e61f0856ed7dc64256ccd02aee8dac014d1f5e9cd8aa180c736fb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}
},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://692a4c4828dc74b1bfb948f58fab96ee6674030cb9009c72f30f9eae482eb682\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f534d6390920d177e185001b28f7ece42d82a0da922b4aaf174c271dbe975c50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b39874b20cdccc7903753342421a1f7e13b7e99a2cb699a7c0e44226aebd4f4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"message\\\":\\\"et denominator for mutating requests\\\\\\\" limit=200\\\\nI1210 12:57:01.294872 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1210 12:57:01.294893 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1210 12:57:01.294918 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 12:57:01.294926 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 12:57:01.294932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1210 12:57:01.294934 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1210 12:57:01.294938 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1210 12:57:01.294941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1210 12:57:01.301734 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2446357718/tls.crt::/tmp/serving-cert-2446357718/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1765371405\\\\\\\\\\\\\\\" (2025-12-10 12:56:44 +0000 UTC to 2026-01-09 12:56:45 +0000 UTC (now=2025-12-10 12:57:01.30169166 +0000 UTC))\\\\\\\"\\\\nI1210 12:57:01.301889 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1765371416\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1765371416\\\\\\\\\\\\\\\" (2025-12-10 11:56:55 +0000 UTC to 2026-12-10 11:56:55 +0000 UTC (now=2025-12-10 12:57:01.301865574 +0000 
UTC))\\\\\\\"\\\\nI1210 12:57:01.301907 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1210 12:57:01.301934 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nF1210 12:57:01.302850 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e5afbcb1ea81c3f9ec4152ef614a3f07ba1ded75c774c467e968f9c3ee72e33\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bee74fc4c681cc10c5a460c807659272e393e19173109e82ef65371c5b363ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bee74fc4c681cc10c5a460c807659272e393e19173109e82ef65371c5b363ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:23Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:23 crc kubenswrapper[4921]: I1210 12:57:23.452771 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:23 crc kubenswrapper[4921]: I1210 12:57:23.453456 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:23 crc kubenswrapper[4921]: I1210 12:57:23.453562 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:23 crc kubenswrapper[4921]: I1210 
12:57:23.453593 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:23 crc kubenswrapper[4921]: I1210 12:57:23.453619 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:23Z","lastTransitionTime":"2025-12-10T12:57:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:23 crc kubenswrapper[4921]: I1210 12:57:23.454847 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:23Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:23 crc kubenswrapper[4921]: I1210 12:57:23.469130 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:23Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:23 crc kubenswrapper[4921]: I1210 12:57:23.484895 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"354355f7-6630-49a8-bdc5-5e875feecb7f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22c45fd7d4d0bb91e995e76a0d813660f9b488a4765e3a21eab2485e1ff03ff3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dbm9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27975eaa70887a1e6ec3bc21ce170bbe5dfe5a05172264be8c8bd343aea02998\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dbm9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-vn2n6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:23Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:23 crc kubenswrapper[4921]: I1210 12:57:23.498830 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bd245e67c99943297f64701eba8772143dc206caf67849eaf2f9a8e82dab0d26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:23Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:23 crc kubenswrapper[4921]: I1210 12:57:23.512025 4921 
status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jskgz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ad003cc-9fcc-4fc6-86b9-247b30013c0a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8c0dc3ea5672198c430f12ce59b7f2a66100fe52e0f7b4552deba97144250d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m875h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:11Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jskgz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:23Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:23 crc kubenswrapper[4921]: I1210 12:57:23.534816 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wwrv2" event={"ID":"e6b3380f-1dd4-45de-9c44-eaa37f965801","Type":"ContainerStarted","Data":"18de9b421542cbc73b0797f1d6e6e6752b88c3f802e5f2fd16d303de041ac72a"} Dec 10 12:57:23 crc kubenswrapper[4921]: I1210 12:57:23.537109 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-m7n89_50684108-04fc-405c-82be-d21d16cd650b/ovnkube-controller/1.log" Dec 10 12:57:23 crc kubenswrapper[4921]: I1210 12:57:23.537978 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-m7n89_50684108-04fc-405c-82be-d21d16cd650b/ovnkube-controller/0.log" Dec 10 12:57:23 crc kubenswrapper[4921]: I1210 12:57:23.542082 4921 
generic.go:334] "Generic (PLEG): container finished" podID="50684108-04fc-405c-82be-d21d16cd650b" containerID="c7cae279c3cb664f0de50f9e6f8e88378d8878555863051da2626693c4337cff" exitCode=1 Dec 10 12:57:23 crc kubenswrapper[4921]: I1210 12:57:23.542135 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" event={"ID":"50684108-04fc-405c-82be-d21d16cd650b","Type":"ContainerDied","Data":"c7cae279c3cb664f0de50f9e6f8e88378d8878555863051da2626693c4337cff"} Dec 10 12:57:23 crc kubenswrapper[4921]: I1210 12:57:23.542184 4921 scope.go:117] "RemoveContainer" containerID="bf05f49208bee2d3d43564c1f88b49d4481b48bbb9afe2108c1eea92e4531305" Dec 10 12:57:23 crc kubenswrapper[4921]: I1210 12:57:23.542867 4921 scope.go:117] "RemoveContainer" containerID="c7cae279c3cb664f0de50f9e6f8e88378d8878555863051da2626693c4337cff" Dec 10 12:57:23 crc kubenswrapper[4921]: E1210 12:57:23.543061 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-m7n89_openshift-ovn-kubernetes(50684108-04fc-405c-82be-d21d16cd650b)\"" pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" podUID="50684108-04fc-405c-82be-d21d16cd650b" Dec 10 12:57:23 crc kubenswrapper[4921]: I1210 12:57:23.556162 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:23 crc kubenswrapper[4921]: I1210 12:57:23.556193 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:23 crc kubenswrapper[4921]: I1210 12:57:23.556204 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:23 crc kubenswrapper[4921]: I1210 12:57:23.556218 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:23 crc kubenswrapper[4921]: I1210 12:57:23.556229 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:23Z","lastTransitionTime":"2025-12-10T12:57:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:23 crc kubenswrapper[4921]: I1210 12:57:23.558994 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2789e9cd1bca4abecf0939aad4a5f63bdc250a525ad3664bc2440e8b0b7a834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:23Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:23 crc kubenswrapper[4921]: I1210 12:57:23.571529 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zmks6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f2626c5-78df-45d2-8970-c4f99790a0fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d43ebe41a779225842dfa1c4d3be01575113b67ada9be07f553df1514e9dcf85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ft9kj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zmks6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:23Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:23 crc kubenswrapper[4921]: I1210 12:57:23.584225 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pqlx4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://480da3b2621712c4562f9423dc98fdbf17a9dc45365f129777611bc7e934c709\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lhs2m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pqlx4\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:23Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:23 crc kubenswrapper[4921]: I1210 12:57:23.603704 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50684108-04fc-405c-82be-d21d16cd650b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8fd269a96475df9dccf2f7bd0ffae831f397f49232f5c22df67903b9b8b8161e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a88b1b9101bc4ab339d394df337e4e11ec8af98b44b621bcb84eed1a0fba3d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://933c0c81aa0aa2d676a6e404f883a7c81240ef7b07a2e794878c85994d0eb88f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f8f888f214898cb28563da7a77267781622df1f2231c27d1fbdee617ada1ec2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27956424405bdf6223a96b8fd91b5152276a1501c3de2e07dfafc8b3329a6063\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59a0f
3962237d723e5aa9044de1ddce3673ae1fb4c9e5e0478cd41daa661f6d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7cae279c3cb664f0de50f9e6f8e88378d8878555863051da2626693c4337cff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bf05f49208bee2d3d43564c1f88b49d4481b48bbb9afe2108c1eea92e4531305\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-10T12:57:19Z\\\",\\\"message\\\":\\\":57:18.800406 6129 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1210 12:57:18.800433 6129 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1210 12:57:18.800441 6129 factory.go:656] Stopping watch factory\\\\nI1210 12:57:18.800465 6129 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1210 12:57:18.800466 6129 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1210 12:57:18.800662 6129 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1210 12:57:18.801063 6129 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1210 12:57:18.801086 6129 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1210 12:57:18.801163 6129 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1210 12:57:18.801631 6129 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1210 12:57:18.801721 6129 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1210 12:57:18.801837 6129 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from 
k8s.io/client-go/informers/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:15Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7cae279c3cb664f0de50f9e6f8e88378d8878555863051da2626693c4337cff\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-10T12:57:23Z\\\",\\\"message\\\":\\\"be-controller-manager/kube-controller-manager-crc openshift-ovn-kubernetes/ovnkube-node-m7n89 openshift-dns/node-resolver-zmks6 openshift-etcd/etcd-crc openshift-multus/multus-pqlx4 openshift-network-node-identity/network-node-identity-vrzqb]\\\\nI1210 12:57:21.359087 6291 obj_retry.go:418] Waiting for all the *v1.Pod retry setup to complete in iterateRetryResources\\\\nI1210 12:57:21.359102 6291 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nI1210 12:57:21.359112 6291 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nI1210 12:57:21.359124 6291 ovn.go:134] Ensuring zone local for Pod openshift-network-node-identity/network-node-identity-vrzqb in node crc\\\\nI1210 12:57:21.359130 6291 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb after 0 failed attempt(s)\\\\nI1210 12:57:21.359134 6291 default_network_controller.go:776] Recording success event on pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nI1210 12:57:21.359147 6291 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1210 12:57:21.359215 6291 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to 
create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd0025f5be6e68aba73c349dd732281dead920b7d8c2d307b4a67cfdafb99119\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34932b230bb26e6c4b1bdf433827ce608df8658f6fb76140a4f0ac680dc1d43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d
1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://34932b230bb26e6c4b1bdf433827ce608df8658f6fb76140a4f0ac680dc1d43f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-m7n89\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:23Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:23 crc kubenswrapper[4921]: I1210 12:57:23.617787 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wwrv2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6b3380f-1dd4-45de-9c44-eaa37f965801\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqtvp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqtvp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:21Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wwrv2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:23Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:23 crc kubenswrapper[4921]: I1210 12:57:23.632039 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-j2nnf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9cc656f0-ce36-474b-9fa3-1ce9f43675a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9vnm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9vnm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:22Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-j2nnf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:23Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:23 crc kubenswrapper[4921]: I1210 12:57:23.651506 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"371fafdc-aa16-4608-aaa2-e419c4ddbc18\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b9a190a657ca03f3fb08626b7af512164ff131b1783b903a02005a111a7036c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57991b0cb6fd4b37082ff5d4eecc6227d77f241e9a983cd3e0eb9db5b485865f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c24d974446ee70bf587bf3969542cda98f062a9cc78b6af73005d9b8d0a6ee02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5a3f231014293fc0412e577cf9840f62f8db86
9ea4f0f8bef1bfc5112b38cf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://17a6158acd097054719316d2ad29dc036546d3951bb1e8dd010618f9155270a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://534968b5f5d9e7b3063c91a3e0b68ba04d83e2cb65ab688b23d284adc6852155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://534968b5f5d9e7b3063c91a3e0b68ba04d83e2cb65ab688b23d284adc6852155\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0acb3ca5fa3945c89412f466b00193354c94ce56dbba608c104d3baf555a2c3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0acb3ca5fa3945c89412f466b00193354c94ce56dbba608c104d3baf555a2c3a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b7d1b714acf0f278cc0310204225d417266a241f1ea827dc625f7b89a7d0ebac\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b7d1b714acf0f278cc0310204225d417266a241f1ea827dc625f7b89a7d0ebac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:23Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:23 crc kubenswrapper[4921]: I1210 12:57:23.659112 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:23 crc kubenswrapper[4921]: I1210 12:57:23.659157 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:23 crc kubenswrapper[4921]: I1210 12:57:23.659174 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:23 crc kubenswrapper[4921]: I1210 12:57:23.659198 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:23 crc kubenswrapper[4921]: I1210 12:57:23.659215 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:23Z","lastTransitionTime":"2025-12-10T12:57:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:23 crc kubenswrapper[4921]: I1210 12:57:23.666341 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02726135-3050-46a1-a3ab-b2ce46cdb75d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12292f0529bcf32fb33e5accfbd0dfd7d53e377a9ee2046d4ca6efc78fe1c31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a4716beddbcd24e8418830aa5494cffffc21272e45e30bd15cfe58bfc07c543\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f66fe2144cde40619405c04d7d83cbcc2e78503401df428502abad1682d4cb7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4244835c7f038a7c1bf4820de49854350a23fac13c5a252a1553f6508594f10e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:23Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:23 crc kubenswrapper[4921]: I1210 12:57:23.684559 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:23Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:23 crc kubenswrapper[4921]: I1210 12:57:23.702565 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://307b845aae3352df08e2f9fd394f4110a37b2a21650593ebb584c5bf37d01397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3be8a498516e12174c8b5612669fd69deef610c01ed9884a5228cd436bbae3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:23Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:23 crc kubenswrapper[4921]: I1210 12:57:23.719500 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-86bpd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"709b4982-f2e6-4692-ab1a-c1d5b7d507ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c842f48ca574d23a086e1b248c17102895f4f45897ac87ddcc1f98f170a22bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34d9e720fab0818e4cdf1e2a4da042a5648c7c396fedf17b395ad07ececd5c9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://34d9e720fab0818e4cdf1e2a4da042a5648c7c396fedf17b395ad07ececd5c9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"start
edAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04723fc3840c9d632dae527a5afa04fc7eea858426056da3dfe8e72186198ab1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://04723fc3840c9d632dae527a5afa04fc7eea858426056da3dfe8e72186198ab1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://adf25ba213f519cad3c21233c0f3d2a383d978543da8ea1db41bb60dd29f9f3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://adf25ba213f519cad3c21233c0f3d2a383d978543da8ea1db41bb60dd29f9f3e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://baa63608618bc4f059414317df70f14a33321d5aed291adc02a9daac92cf5428\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\"
:\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baa63608618bc4f059414317df70f14a33321d5aed291adc02a9daac92cf5428\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e54a218f367591b87841a39399d5889344b8b92fcc70d77105a0191d3dba37c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e54a218f367591b87841a39399d5889344b8b92fcc70d77105a0191d3dba37c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d90adbff8edcd85eebe4858e412769dff7a05b05bbe7fc533906b55e6ee415e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d90adbff8edcd85eebe4858e412769dff7a05b05bbe7fc533906b55e6ee415e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.1
68.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-86bpd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:23Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:23 crc kubenswrapper[4921]: I1210 12:57:23.745190 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f57208b0-80bc-4c1b-bbab-9d2f858972f6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0534394a39803e8a7555e29d0770b5ac7f9197a5f0e03bec4c5460d77fffdd14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6eaca0cb438e61f0856ed7dc64256ccd02aee8dac014d1f5e9cd8aa180c736fb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://692a4c4828dc74b1bfb948f58fab96ee6674030cb9009c72f30f9eae482eb682\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-clust
er-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f534d6390920d177e185001b28f7ece42d82a0da922b4aaf174c271dbe975c50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b39874b20cdccc7903753342421a1f7e13b7e99a2cb699a7c0e44226aebd4f4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"message\\\":\\\"et denominator for mutating requests\\\\\\\" limit=200\\\\nI1210 12:57:01.294872 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1210 12:57:01.294893 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1210 12:57:01.294918 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 12:57:01.294926 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 12:57:01.294932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1210 12:57:01.294934 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1210 12:57:01.294938 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1210 12:57:01.294941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1210 12:57:01.301734 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2446357718/tls.crt::/tmp/serving-cert-2446357718/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1765371405\\\\\\\\\\\\\\\" (2025-12-10 12:56:44 +0000 UTC to 2026-01-09 12:56:45 +0000 UTC (now=2025-12-10 12:57:01.30169166 +0000 UTC))\\\\\\\"\\\\nI1210 12:57:01.301889 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1765371416\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1765371416\\\\\\\\\\\\\\\" (2025-12-10 11:56:55 +0000 UTC to 2026-12-10 11:56:55 +0000 UTC (now=2025-12-10 12:57:01.301865574 +0000 UTC))\\\\\\\"\\\\nI1210 12:57:01.301907 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1210 12:57:01.301934 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nF1210 12:57:01.302850 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e5afbcb1ea81c3f9ec4152ef614a3f07ba1ded75c774c467e968f9c3ee72e33\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bee74fc4c681cc10c5a460c807659272e393e19173109e82ef65371c5b363ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bee74fc4c681cc10c5a460c807659272e393e19173109e82ef65371c5b363ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:23Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:23 crc kubenswrapper[4921]: I1210 12:57:23.761541 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:23 crc kubenswrapper[4921]: I1210 12:57:23.761611 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:23 crc kubenswrapper[4921]: I1210 12:57:23.761628 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:23 crc kubenswrapper[4921]: I1210 12:57:23.761651 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:23 crc kubenswrapper[4921]: I1210 12:57:23.761665 4921 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:23Z","lastTransitionTime":"2025-12-10T12:57:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:23 crc kubenswrapper[4921]: I1210 12:57:23.764586 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:23Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:23 crc kubenswrapper[4921]: I1210 12:57:23.781783 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:23Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:23 crc kubenswrapper[4921]: I1210 12:57:23.796452 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"354355f7-6630-49a8-bdc5-5e875feecb7f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22c45fd7d4d0bb91e995e76a0d813660f9b488a4765e3a21eab2485e1ff03ff3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dbm9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27975eaa70887a1e6ec3bc21ce170bbe5dfe5a05172264be8c8bd343aea02998\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dbm9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-vn2n6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:23Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:23 crc kubenswrapper[4921]: I1210 12:57:23.814508 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bd245e67c99943297f64701eba8772143dc206caf67849eaf2f9a8e82dab0d26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:23Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:23 crc kubenswrapper[4921]: I1210 12:57:23.830030 4921 
status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jskgz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ad003cc-9fcc-4fc6-86b9-247b30013c0a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8c0dc3ea5672198c430f12ce59b7f2a66100fe52e0f7b4552deba97144250d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m875h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:11Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jskgz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:23Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:23 crc kubenswrapper[4921]: I1210 12:57:23.864744 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:23 crc kubenswrapper[4921]: I1210 12:57:23.865099 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:23 crc kubenswrapper[4921]: I1210 12:57:23.865186 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:23 crc kubenswrapper[4921]: I1210 12:57:23.865288 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:23 crc kubenswrapper[4921]: I1210 12:57:23.865433 4921 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:23Z","lastTransitionTime":"2025-12-10T12:57:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:23 crc kubenswrapper[4921]: I1210 12:57:23.968114 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:23 crc kubenswrapper[4921]: I1210 12:57:23.968817 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:23 crc kubenswrapper[4921]: I1210 12:57:23.968924 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:23 crc kubenswrapper[4921]: I1210 12:57:23.969015 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:23 crc kubenswrapper[4921]: I1210 12:57:23.969106 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:23Z","lastTransitionTime":"2025-12-10T12:57:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:24 crc kubenswrapper[4921]: I1210 12:57:24.071981 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:24 crc kubenswrapper[4921]: I1210 12:57:24.072402 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:24 crc kubenswrapper[4921]: I1210 12:57:24.072515 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:24 crc kubenswrapper[4921]: I1210 12:57:24.072600 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:24 crc kubenswrapper[4921]: I1210 12:57:24.072701 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:24Z","lastTransitionTime":"2025-12-10T12:57:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:24 crc kubenswrapper[4921]: I1210 12:57:24.168342 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9cc656f0-ce36-474b-9fa3-1ce9f43675a4-metrics-certs\") pod \"network-metrics-daemon-j2nnf\" (UID: \"9cc656f0-ce36-474b-9fa3-1ce9f43675a4\") " pod="openshift-multus/network-metrics-daemon-j2nnf" Dec 10 12:57:24 crc kubenswrapper[4921]: E1210 12:57:24.168603 4921 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 10 12:57:24 crc kubenswrapper[4921]: E1210 12:57:24.168687 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9cc656f0-ce36-474b-9fa3-1ce9f43675a4-metrics-certs podName:9cc656f0-ce36-474b-9fa3-1ce9f43675a4 nodeName:}" failed. No retries permitted until 2025-12-10 12:57:26.168659376 +0000 UTC m=+43.384881330 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/9cc656f0-ce36-474b-9fa3-1ce9f43675a4-metrics-certs") pod "network-metrics-daemon-j2nnf" (UID: "9cc656f0-ce36-474b-9fa3-1ce9f43675a4") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 10 12:57:24 crc kubenswrapper[4921]: I1210 12:57:24.176434 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:24 crc kubenswrapper[4921]: I1210 12:57:24.176694 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:24 crc kubenswrapper[4921]: I1210 12:57:24.176831 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:24 crc kubenswrapper[4921]: I1210 12:57:24.176967 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:24 crc kubenswrapper[4921]: I1210 12:57:24.177135 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:24Z","lastTransitionTime":"2025-12-10T12:57:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:24 crc kubenswrapper[4921]: I1210 12:57:24.192648 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 12:57:24 crc kubenswrapper[4921]: I1210 12:57:24.192647 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 12:57:24 crc kubenswrapper[4921]: E1210 12:57:24.192951 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 12:57:24 crc kubenswrapper[4921]: I1210 12:57:24.192708 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 12:57:24 crc kubenswrapper[4921]: E1210 12:57:24.193498 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 12:57:24 crc kubenswrapper[4921]: I1210 12:57:24.192711 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j2nnf" Dec 10 12:57:24 crc kubenswrapper[4921]: E1210 12:57:24.193122 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 12:57:24 crc kubenswrapper[4921]: E1210 12:57:24.194348 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j2nnf" podUID="9cc656f0-ce36-474b-9fa3-1ce9f43675a4" Dec 10 12:57:24 crc kubenswrapper[4921]: I1210 12:57:24.279981 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:24 crc kubenswrapper[4921]: I1210 12:57:24.280273 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:24 crc kubenswrapper[4921]: I1210 12:57:24.280372 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:24 crc kubenswrapper[4921]: I1210 12:57:24.280469 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:24 crc kubenswrapper[4921]: I1210 12:57:24.280534 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:24Z","lastTransitionTime":"2025-12-10T12:57:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:24 crc kubenswrapper[4921]: I1210 12:57:24.383136 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:24 crc kubenswrapper[4921]: I1210 12:57:24.383192 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:24 crc kubenswrapper[4921]: I1210 12:57:24.383203 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:24 crc kubenswrapper[4921]: I1210 12:57:24.383219 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:24 crc kubenswrapper[4921]: I1210 12:57:24.383231 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:24Z","lastTransitionTime":"2025-12-10T12:57:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:24 crc kubenswrapper[4921]: I1210 12:57:24.486850 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:24 crc kubenswrapper[4921]: I1210 12:57:24.486930 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:24 crc kubenswrapper[4921]: I1210 12:57:24.486957 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:24 crc kubenswrapper[4921]: I1210 12:57:24.486985 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:24 crc kubenswrapper[4921]: I1210 12:57:24.487009 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:24Z","lastTransitionTime":"2025-12-10T12:57:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:24 crc kubenswrapper[4921]: I1210 12:57:24.548920 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wwrv2" event={"ID":"e6b3380f-1dd4-45de-9c44-eaa37f965801","Type":"ContainerStarted","Data":"49ddf0d56e11ffafc30ec8b0065dd6ef3c3decdbf696e169013572c830f6557c"} Dec 10 12:57:24 crc kubenswrapper[4921]: I1210 12:57:24.552142 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-m7n89_50684108-04fc-405c-82be-d21d16cd650b/ovnkube-controller/1.log" Dec 10 12:57:24 crc kubenswrapper[4921]: I1210 12:57:24.568815 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jskgz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ad003cc-9fcc-4fc6-86b9-247b30013c0a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8c0dc3ea5672198c430f12ce59b7f2a66100fe52e0f7b4552deba97144250d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m875h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:11Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jskgz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:24Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:24 crc kubenswrapper[4921]: I1210 12:57:24.584876 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bd245e67c99943297f64701eba8772143dc206caf67849eaf2f9a8e82dab0d26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:24Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:24 crc kubenswrapper[4921]: I1210 12:57:24.590870 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:24 crc kubenswrapper[4921]: I1210 12:57:24.590899 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:24 crc kubenswrapper[4921]: I1210 12:57:24.590911 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:24 crc kubenswrapper[4921]: I1210 12:57:24.590930 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:24 crc kubenswrapper[4921]: I1210 12:57:24.590945 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:24Z","lastTransitionTime":"2025-12-10T12:57:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:24 crc kubenswrapper[4921]: I1210 12:57:24.604573 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2789e9cd1bca4abecf0939aad4a5f63bdc250a525ad3664bc2440e8b0b7a834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:24Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:24 crc kubenswrapper[4921]: I1210 12:57:24.619852 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zmks6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f2626c5-78df-45d2-8970-c4f99790a0fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d43ebe41a779225842dfa1c4d3be01575113b67ada9be07f553df1514e9dcf85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ft9kj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zmks6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:24Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:24 crc kubenswrapper[4921]: I1210 12:57:24.634853 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pqlx4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://480da3b2621712c4562f9423dc98fdbf17a9dc45365f129777611bc7e934c709\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lhs2m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pqlx4\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:24Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:24 crc kubenswrapper[4921]: I1210 12:57:24.655323 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50684108-04fc-405c-82be-d21d16cd650b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8fd269a96475df9dccf2f7bd0ffae831f397f49232f5c22df67903b9b8b8161e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a88b1b9101bc4ab339d394df337e4e11ec8af98b44b621bcb84eed1a0fba3d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://933c0c81aa0aa2d676a6e404f883a7c81240ef7b07a2e794878c85994d0eb88f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f8f888f214898cb28563da7a77267781622df1f2231c27d1fbdee617ada1ec2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27956424405bdf6223a96b8fd91b5152276a1501c3de2e07dfafc8b3329a6063\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59a0f
3962237d723e5aa9044de1ddce3673ae1fb4c9e5e0478cd41daa661f6d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7cae279c3cb664f0de50f9e6f8e88378d8878555863051da2626693c4337cff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bf05f49208bee2d3d43564c1f88b49d4481b48bbb9afe2108c1eea92e4531305\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-10T12:57:19Z\\\",\\\"message\\\":\\\":57:18.800406 6129 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1210 12:57:18.800433 6129 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1210 12:57:18.800441 6129 factory.go:656] Stopping watch factory\\\\nI1210 12:57:18.800465 6129 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1210 12:57:18.800466 6129 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1210 12:57:18.800662 6129 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1210 12:57:18.801063 6129 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1210 12:57:18.801086 6129 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1210 12:57:18.801163 6129 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1210 12:57:18.801631 6129 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1210 12:57:18.801721 6129 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1210 12:57:18.801837 6129 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from 
k8s.io/client-go/informers/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:15Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7cae279c3cb664f0de50f9e6f8e88378d8878555863051da2626693c4337cff\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-10T12:57:23Z\\\",\\\"message\\\":\\\"be-controller-manager/kube-controller-manager-crc openshift-ovn-kubernetes/ovnkube-node-m7n89 openshift-dns/node-resolver-zmks6 openshift-etcd/etcd-crc openshift-multus/multus-pqlx4 openshift-network-node-identity/network-node-identity-vrzqb]\\\\nI1210 12:57:21.359087 6291 obj_retry.go:418] Waiting for all the *v1.Pod retry setup to complete in iterateRetryResources\\\\nI1210 12:57:21.359102 6291 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nI1210 12:57:21.359112 6291 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nI1210 12:57:21.359124 6291 ovn.go:134] Ensuring zone local for Pod openshift-network-node-identity/network-node-identity-vrzqb in node crc\\\\nI1210 12:57:21.359130 6291 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb after 0 failed attempt(s)\\\\nI1210 12:57:21.359134 6291 default_network_controller.go:776] Recording success event on pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nI1210 12:57:21.359147 6291 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1210 12:57:21.359215 6291 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to 
create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd0025f5be6e68aba73c349dd732281dead920b7d8c2d307b4a67cfdafb99119\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34932b230bb26e6c4b1bdf433827ce608df8658f6fb76140a4f0ac680dc1d43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d
1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://34932b230bb26e6c4b1bdf433827ce608df8658f6fb76140a4f0ac680dc1d43f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-m7n89\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:24Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:24 crc kubenswrapper[4921]: I1210 12:57:24.671226 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wwrv2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6b3380f-1dd4-45de-9c44-eaa37f965801\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://18de9b421542cbc73b0797f1d6e6e6752b88c3f802e5f2fd16d303de041ac72a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqtvp\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49ddf0d56e11ffafc30ec8b0065dd6ef3c3decdbf696e169013572c830f6557c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqtvp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:21Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wwrv2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:24Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:24 crc kubenswrapper[4921]: I1210 12:57:24.687143 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-j2nnf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9cc656f0-ce36-474b-9fa3-1ce9f43675a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9vnm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9vnm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:22Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-j2nnf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:24Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:24 crc kubenswrapper[4921]: I1210 12:57:24.693513 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:24 crc kubenswrapper[4921]: I1210 12:57:24.693596 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:24 crc kubenswrapper[4921]: I1210 12:57:24.693670 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:24 crc kubenswrapper[4921]: I1210 12:57:24.693698 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:24 crc kubenswrapper[4921]: I1210 12:57:24.693715 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:24Z","lastTransitionTime":"2025-12-10T12:57:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:24 crc kubenswrapper[4921]: I1210 12:57:24.718492 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"371fafdc-aa16-4608-aaa2-e419c4ddbc18\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b9a190a657ca03f3fb08626b7af512164ff131b1783b903a02005a111a7036c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57991b0cb6fd4b37082ff5d4eecc6227d77f241e9a983cd3e0eb9db5b485865f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c24d974446ee70bf587bf3969542cda98f062a9cc78b6af73005d9b8d0a6ee02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5a3f231014293fc0412e577cf9840f62f8db869ea4f0f8bef1bfc5112b38cf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://17a6158acd097054719316d2ad29dc036546d3951bb1e8dd010618f9155270a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://534968b5f5d9e7b3063c91a3e0b68ba04d83e2cb65ab688b23d284adc6852155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://534968b5f5d9e7b3063c91a3e0b68ba04d83e2cb65ab688b23d284adc6852155\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0acb3ca5fa3945c89412f466b00193354c94ce56dbba608c104d3baf555a2c3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0acb3ca5fa3945c89412f466b00193354c94ce56dbba608c104d3baf555a2c3a\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b7d1b714acf0f278cc0310204225d417266a241f1ea827dc625f7b89a7d0ebac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b7d1b714acf0f278cc0310204225d417266a241f1ea827dc625f7b89a7d0ebac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:24Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:24 crc kubenswrapper[4921]: I1210 12:57:24.736846 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"02726135-3050-46a1-a3ab-b2ce46cdb75d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12292f0529bcf32fb33e5accfbd0dfd7d53e377a9ee2046d4ca6efc78fe1c31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a4716beddbcd24e8418830aa5494cffffc21272e45e30bd15cfe58bfc07c543\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f66fe2144cde40619405c04d7d83cbcc2e78503401df428502abad1682d4cb7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4244835c7f038a7c1bf4820de49854350a23fac13c5a252a1553f6508594f10e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:24Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:24 crc kubenswrapper[4921]: I1210 12:57:24.758110 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:24Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:24 crc kubenswrapper[4921]: I1210 12:57:24.782243 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://307b845aae3352df08e2f9fd394f4110a37b2a21650593ebb584c5bf37d01397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3be8a498516e12174c8b5612669fd69deef610c01ed9884a5228cd436bbae3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:24Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:24 crc kubenswrapper[4921]: I1210 12:57:24.796988 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:24 crc kubenswrapper[4921]: I1210 12:57:24.797037 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:24 crc kubenswrapper[4921]: I1210 12:57:24.797050 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:24 crc kubenswrapper[4921]: I1210 12:57:24.797070 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:24 crc kubenswrapper[4921]: I1210 12:57:24.797082 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:24Z","lastTransitionTime":"2025-12-10T12:57:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:24 crc kubenswrapper[4921]: I1210 12:57:24.812495 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-86bpd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"709b4982-f2e6-4692-ab1a-c1d5b7d507ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c842f48ca574d23a086e1b248c17102895f4f45897ac87ddcc1f98f170a22bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34d9e720fab0818e4cdf1e2a4da042a5648c7c396fedf17b395ad07ececd5c9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://34d9e720fab0818e4cdf1e2a4da042a5648c7c396fedf17b395ad07ececd5c9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04723fc3840c9d632dae527a5afa04fc7eea858426056da3dfe8e72186198ab1\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://04723fc3840c9d632dae527a5afa04fc7eea858426056da3dfe8e72186198ab1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://adf25ba213f519cad3c21233c0f3d2a383d978543da8ea1db41bb60dd29f9f3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://adf25ba213f519cad3c21233c0f3d2a383d978543da8ea1db41bb60dd29f9f3e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://baa63608618bc4f059414317df70f14a33321d5aed291adc02a9daac92cf5428\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baa63608618bc4f059414317df70f14a33321d5aed291adc02a9daac92cf5428\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e54a218f367591b87841a39399d5889344b8b92fcc70d77105a0191d3dba37c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e54a218f367591b87841a39399d5889344b8b92fcc70d77105a0191d3dba37c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d90adbff8edcd85eebe4858e412769dff7a05b05bbe7fc533906b55e6ee415e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d90adbff8edcd85eebe4858e412769dff7a05b05bbe7fc533906b55e6ee415e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-86bpd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:24Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:24 crc kubenswrapper[4921]: I1210 12:57:24.828282 4921 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:24Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:24 crc kubenswrapper[4921]: I1210 12:57:24.844602 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:24Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:24 crc kubenswrapper[4921]: I1210 12:57:24.859416 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"354355f7-6630-49a8-bdc5-5e875feecb7f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22c45fd7d4d0bb91e995e76a0d813660f9b488a4765e3a21eab2485e1ff03ff3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dbm9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27975eaa70887a1e6ec3bc21ce170bbe5dfe5a05172264be8c8bd343aea02998\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dbm9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-vn2n6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:24Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:24 crc kubenswrapper[4921]: I1210 12:57:24.877741 4921 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f57208b0-80bc-4c1b-bbab-9d2f858972f6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0534394a39803e8a7555e29d0770b5ac7f9197a5f0e03bec4c5460d77fffdd14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6eaca0cb438e61f0856ed7dc64256ccd02aee8dac014d1f5e9cd8aa180c736fb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://692a4c4828dc74b1bfb948f58fab96ee6674030cb9009c72f30f9eae482eb682\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f534d6390920d177e185001b28f7ece42d82a0da922
b4aaf174c271dbe975c50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b39874b20cdccc7903753342421a1f7e13b7e99a2cb699a7c0e44226aebd4f4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"message\\\":\\\"et denominator for mutating requests\\\\\\\" limit=200\\\\nI1210 12:57:01.294872 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1210 12:57:01.294893 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1210 12:57:01.294918 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 12:57:01.294926 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 12:57:01.294932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1210 12:57:01.294934 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1210 12:57:01.294938 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1210 12:57:01.294941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1210 12:57:01.301734 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2446357718/tls.crt::/tmp/serving-cert-2446357718/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1765371405\\\\\\\\\\\\\\\" (2025-12-10 12:56:44 +0000 UTC to 2026-01-09 12:56:45 +0000 UTC (now=2025-12-10 12:57:01.30169166 +0000 UTC))\\\\\\\"\\\\nI1210 12:57:01.301889 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1765371416\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1765371416\\\\\\\\\\\\\\\" (2025-12-10 11:56:55 +0000 UTC to 2026-12-10 11:56:55 +0000 UTC (now=2025-12-10 12:57:01.301865574 +0000 UTC))\\\\\\\"\\\\nI1210 12:57:01.301907 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1210 12:57:01.301934 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nF1210 12:57:01.302850 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e5afbcb1ea81c3f9ec4152ef614a3f07ba1ded75c774c467e968f9c3ee72e33\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bee74fc4c681cc10c5a460c807659272e393e19173109e82ef65371c5b363ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bee74fc4c681cc10c5a460c807659272e393e19173109e82ef65371c5b363ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:24Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:24 crc kubenswrapper[4921]: I1210 12:57:24.899859 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:24 crc kubenswrapper[4921]: I1210 12:57:24.900178 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:24 crc kubenswrapper[4921]: I1210 12:57:24.900304 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:24 crc kubenswrapper[4921]: I1210 12:57:24.900444 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:24 crc kubenswrapper[4921]: I1210 12:57:24.900547 4921 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:24Z","lastTransitionTime":"2025-12-10T12:57:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:25 crc kubenswrapper[4921]: I1210 12:57:25.004115 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:25 crc kubenswrapper[4921]: I1210 12:57:25.004178 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:25 crc kubenswrapper[4921]: I1210 12:57:25.004196 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:25 crc kubenswrapper[4921]: I1210 12:57:25.004223 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:25 crc kubenswrapper[4921]: I1210 12:57:25.004241 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:25Z","lastTransitionTime":"2025-12-10T12:57:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:25 crc kubenswrapper[4921]: I1210 12:57:25.037365 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:25 crc kubenswrapper[4921]: I1210 12:57:25.037502 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:25 crc kubenswrapper[4921]: I1210 12:57:25.037527 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:25 crc kubenswrapper[4921]: I1210 12:57:25.037601 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:25 crc kubenswrapper[4921]: I1210 12:57:25.037624 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:25Z","lastTransitionTime":"2025-12-10T12:57:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:25 crc kubenswrapper[4921]: E1210 12:57:25.061056 4921 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:25Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:25Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"aa6d129a-c0be-471d-913f-2184d68fb040\\\",\\\"systemUUID\\\":\\\"539c9d38-f260-4af7-b6c3-f4170bf93c3e\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:25Z is after 
2025-08-24T17:21:41Z" Dec 10 12:57:25 crc kubenswrapper[4921]: I1210 12:57:25.067068 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:25 crc kubenswrapper[4921]: I1210 12:57:25.067161 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:25 crc kubenswrapper[4921]: I1210 12:57:25.067190 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:25 crc kubenswrapper[4921]: I1210 12:57:25.067241 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:25 crc kubenswrapper[4921]: I1210 12:57:25.067267 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:25Z","lastTransitionTime":"2025-12-10T12:57:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:25 crc kubenswrapper[4921]: E1210 12:57:25.090544 4921 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:25Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:25Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"aa6d129a-c0be-471d-913f-2184d68fb040\\\",\\\"systemUUID\\\":\\\"539c9d38-f260-4af7-b6c3-f4170bf93c3e\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:25Z is after 
2025-08-24T17:21:41Z" Dec 10 12:57:25 crc kubenswrapper[4921]: I1210 12:57:25.097147 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:25 crc kubenswrapper[4921]: I1210 12:57:25.097222 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:25 crc kubenswrapper[4921]: I1210 12:57:25.097240 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:25 crc kubenswrapper[4921]: I1210 12:57:25.097268 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:25 crc kubenswrapper[4921]: I1210 12:57:25.097286 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:25Z","lastTransitionTime":"2025-12-10T12:57:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:25 crc kubenswrapper[4921]: I1210 12:57:25.127750 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:25 crc kubenswrapper[4921]: I1210 12:57:25.127831 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:25 crc kubenswrapper[4921]: I1210 12:57:25.127856 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:25 crc kubenswrapper[4921]: I1210 12:57:25.127892 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:25 crc kubenswrapper[4921]: I1210 12:57:25.127916 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:25Z","lastTransitionTime":"2025-12-10T12:57:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:25 crc kubenswrapper[4921]: I1210 12:57:25.158325 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:25 crc kubenswrapper[4921]: I1210 12:57:25.158385 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:25 crc kubenswrapper[4921]: I1210 12:57:25.158438 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:25 crc kubenswrapper[4921]: I1210 12:57:25.158467 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:25 crc kubenswrapper[4921]: I1210 12:57:25.158486 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:25Z","lastTransitionTime":"2025-12-10T12:57:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:25 crc kubenswrapper[4921]: E1210 12:57:25.181992 4921 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 10 12:57:25 crc kubenswrapper[4921]: I1210 12:57:25.184540 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:25 crc kubenswrapper[4921]: I1210 12:57:25.184600 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:25 crc kubenswrapper[4921]: I1210 12:57:25.184620 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:25 crc kubenswrapper[4921]: I1210 12:57:25.184649 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:25 crc kubenswrapper[4921]: I1210 12:57:25.184686 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:25Z","lastTransitionTime":"2025-12-10T12:57:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:25 crc kubenswrapper[4921]: I1210 12:57:25.288498 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:25 crc kubenswrapper[4921]: I1210 12:57:25.288588 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:25 crc kubenswrapper[4921]: I1210 12:57:25.288609 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:25 crc kubenswrapper[4921]: I1210 12:57:25.288640 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:25 crc kubenswrapper[4921]: I1210 12:57:25.288661 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:25Z","lastTransitionTime":"2025-12-10T12:57:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:25 crc kubenswrapper[4921]: I1210 12:57:25.397738 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:25 crc kubenswrapper[4921]: I1210 12:57:25.397807 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:25 crc kubenswrapper[4921]: I1210 12:57:25.397823 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:25 crc kubenswrapper[4921]: I1210 12:57:25.397853 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:25 crc kubenswrapper[4921]: I1210 12:57:25.397871 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:25Z","lastTransitionTime":"2025-12-10T12:57:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:25 crc kubenswrapper[4921]: I1210 12:57:25.501788 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:25 crc kubenswrapper[4921]: I1210 12:57:25.501865 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:25 crc kubenswrapper[4921]: I1210 12:57:25.501916 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:25 crc kubenswrapper[4921]: I1210 12:57:25.501944 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:25 crc kubenswrapper[4921]: I1210 12:57:25.501966 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:25Z","lastTransitionTime":"2025-12-10T12:57:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:25 crc kubenswrapper[4921]: I1210 12:57:25.606074 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:25 crc kubenswrapper[4921]: I1210 12:57:25.606695 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:25 crc kubenswrapper[4921]: I1210 12:57:25.607292 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:25 crc kubenswrapper[4921]: I1210 12:57:25.607602 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:25 crc kubenswrapper[4921]: I1210 12:57:25.607784 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:25Z","lastTransitionTime":"2025-12-10T12:57:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:25 crc kubenswrapper[4921]: I1210 12:57:25.712019 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:25 crc kubenswrapper[4921]: I1210 12:57:25.712078 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:25 crc kubenswrapper[4921]: I1210 12:57:25.712096 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:25 crc kubenswrapper[4921]: I1210 12:57:25.712119 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:25 crc kubenswrapper[4921]: I1210 12:57:25.712135 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:25Z","lastTransitionTime":"2025-12-10T12:57:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:25 crc kubenswrapper[4921]: I1210 12:57:25.815949 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:25 crc kubenswrapper[4921]: I1210 12:57:25.816029 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:25 crc kubenswrapper[4921]: I1210 12:57:25.816047 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:25 crc kubenswrapper[4921]: I1210 12:57:25.816075 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:25 crc kubenswrapper[4921]: I1210 12:57:25.816094 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:25Z","lastTransitionTime":"2025-12-10T12:57:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:25 crc kubenswrapper[4921]: I1210 12:57:25.920279 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:25 crc kubenswrapper[4921]: I1210 12:57:25.920370 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:25 crc kubenswrapper[4921]: I1210 12:57:25.920427 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:25 crc kubenswrapper[4921]: I1210 12:57:25.920468 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:25 crc kubenswrapper[4921]: I1210 12:57:25.920496 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:25Z","lastTransitionTime":"2025-12-10T12:57:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:26 crc kubenswrapper[4921]: I1210 12:57:26.023900 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:26 crc kubenswrapper[4921]: I1210 12:57:26.024716 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:26 crc kubenswrapper[4921]: I1210 12:57:26.024886 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:26 crc kubenswrapper[4921]: I1210 12:57:26.025032 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:26 crc kubenswrapper[4921]: I1210 12:57:26.025152 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:26Z","lastTransitionTime":"2025-12-10T12:57:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:26 crc kubenswrapper[4921]: I1210 12:57:26.129188 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:26 crc kubenswrapper[4921]: I1210 12:57:26.129262 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:26 crc kubenswrapper[4921]: I1210 12:57:26.129275 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:26 crc kubenswrapper[4921]: I1210 12:57:26.129294 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:26 crc kubenswrapper[4921]: I1210 12:57:26.129308 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:26Z","lastTransitionTime":"2025-12-10T12:57:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:26 crc kubenswrapper[4921]: I1210 12:57:26.192267 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 12:57:26 crc kubenswrapper[4921]: I1210 12:57:26.192462 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 12:57:26 crc kubenswrapper[4921]: I1210 12:57:26.192476 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j2nnf" Dec 10 12:57:26 crc kubenswrapper[4921]: E1210 12:57:26.192588 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 12:57:26 crc kubenswrapper[4921]: I1210 12:57:26.192615 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 12:57:26 crc kubenswrapper[4921]: E1210 12:57:26.192727 4921 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 10 12:57:26 crc kubenswrapper[4921]: E1210 12:57:26.192798 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9cc656f0-ce36-474b-9fa3-1ce9f43675a4-metrics-certs podName:9cc656f0-ce36-474b-9fa3-1ce9f43675a4 nodeName:}" failed. No retries permitted until 2025-12-10 12:57:30.192779327 +0000 UTC m=+47.409001261 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/9cc656f0-ce36-474b-9fa3-1ce9f43675a4-metrics-certs") pod "network-metrics-daemon-j2nnf" (UID: "9cc656f0-ce36-474b-9fa3-1ce9f43675a4") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 10 12:57:26 crc kubenswrapper[4921]: I1210 12:57:26.192616 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9cc656f0-ce36-474b-9fa3-1ce9f43675a4-metrics-certs\") pod \"network-metrics-daemon-j2nnf\" (UID: \"9cc656f0-ce36-474b-9fa3-1ce9f43675a4\") " pod="openshift-multus/network-metrics-daemon-j2nnf" Dec 10 12:57:26 crc kubenswrapper[4921]: E1210 12:57:26.192808 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j2nnf" podUID="9cc656f0-ce36-474b-9fa3-1ce9f43675a4" Dec 10 12:57:26 crc kubenswrapper[4921]: E1210 12:57:26.192922 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 12:57:26 crc kubenswrapper[4921]: E1210 12:57:26.192996 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 12:57:26 crc kubenswrapper[4921]: I1210 12:57:26.232375 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:26 crc kubenswrapper[4921]: I1210 12:57:26.232701 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:26 crc kubenswrapper[4921]: I1210 12:57:26.232847 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:26 crc kubenswrapper[4921]: I1210 12:57:26.232942 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:26 crc kubenswrapper[4921]: I1210 12:57:26.233024 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:26Z","lastTransitionTime":"2025-12-10T12:57:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:26 crc kubenswrapper[4921]: I1210 12:57:26.335958 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:26 crc kubenswrapper[4921]: I1210 12:57:26.336013 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:26 crc kubenswrapper[4921]: I1210 12:57:26.336027 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:26 crc kubenswrapper[4921]: I1210 12:57:26.336045 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:26 crc kubenswrapper[4921]: I1210 12:57:26.336059 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:26Z","lastTransitionTime":"2025-12-10T12:57:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:26 crc kubenswrapper[4921]: I1210 12:57:26.440243 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:26 crc kubenswrapper[4921]: I1210 12:57:26.440332 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:26 crc kubenswrapper[4921]: I1210 12:57:26.440360 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:26 crc kubenswrapper[4921]: I1210 12:57:26.440439 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:26 crc kubenswrapper[4921]: I1210 12:57:26.440465 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:26Z","lastTransitionTime":"2025-12-10T12:57:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:26 crc kubenswrapper[4921]: I1210 12:57:26.543168 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:26 crc kubenswrapper[4921]: I1210 12:57:26.543208 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:26 crc kubenswrapper[4921]: I1210 12:57:26.543218 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:26 crc kubenswrapper[4921]: I1210 12:57:26.543260 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:26 crc kubenswrapper[4921]: I1210 12:57:26.543275 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:26Z","lastTransitionTime":"2025-12-10T12:57:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:26 crc kubenswrapper[4921]: I1210 12:57:26.645500 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:26 crc kubenswrapper[4921]: I1210 12:57:26.645558 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:26 crc kubenswrapper[4921]: I1210 12:57:26.645569 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:26 crc kubenswrapper[4921]: I1210 12:57:26.645588 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:26 crc kubenswrapper[4921]: I1210 12:57:26.645600 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:26Z","lastTransitionTime":"2025-12-10T12:57:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:26 crc kubenswrapper[4921]: I1210 12:57:26.748943 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:26 crc kubenswrapper[4921]: I1210 12:57:26.749023 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:26 crc kubenswrapper[4921]: I1210 12:57:26.749037 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:26 crc kubenswrapper[4921]: I1210 12:57:26.749080 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:26 crc kubenswrapper[4921]: I1210 12:57:26.749092 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:26Z","lastTransitionTime":"2025-12-10T12:57:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:26 crc kubenswrapper[4921]: I1210 12:57:26.851845 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:26 crc kubenswrapper[4921]: I1210 12:57:26.851902 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:26 crc kubenswrapper[4921]: I1210 12:57:26.851915 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:26 crc kubenswrapper[4921]: I1210 12:57:26.851935 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:26 crc kubenswrapper[4921]: I1210 12:57:26.851950 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:26Z","lastTransitionTime":"2025-12-10T12:57:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:26 crc kubenswrapper[4921]: I1210 12:57:26.955173 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:26 crc kubenswrapper[4921]: I1210 12:57:26.955680 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:26 crc kubenswrapper[4921]: I1210 12:57:26.955880 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:26 crc kubenswrapper[4921]: I1210 12:57:26.956092 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:26 crc kubenswrapper[4921]: I1210 12:57:26.956254 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:26Z","lastTransitionTime":"2025-12-10T12:57:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:27 crc kubenswrapper[4921]: I1210 12:57:27.059798 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:27 crc kubenswrapper[4921]: I1210 12:57:27.060466 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:27 crc kubenswrapper[4921]: I1210 12:57:27.060494 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:27 crc kubenswrapper[4921]: I1210 12:57:27.060515 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:27 crc kubenswrapper[4921]: I1210 12:57:27.060528 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:27Z","lastTransitionTime":"2025-12-10T12:57:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:27 crc kubenswrapper[4921]: I1210 12:57:27.163433 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:27 crc kubenswrapper[4921]: I1210 12:57:27.163476 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:27 crc kubenswrapper[4921]: I1210 12:57:27.163485 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:27 crc kubenswrapper[4921]: I1210 12:57:27.163500 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:27 crc kubenswrapper[4921]: I1210 12:57:27.163511 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:27Z","lastTransitionTime":"2025-12-10T12:57:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:27 crc kubenswrapper[4921]: I1210 12:57:27.266573 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:27 crc kubenswrapper[4921]: I1210 12:57:27.266681 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:27 crc kubenswrapper[4921]: I1210 12:57:27.266693 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:27 crc kubenswrapper[4921]: I1210 12:57:27.266713 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:27 crc kubenswrapper[4921]: I1210 12:57:27.266726 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:27Z","lastTransitionTime":"2025-12-10T12:57:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:27 crc kubenswrapper[4921]: I1210 12:57:27.369663 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:27 crc kubenswrapper[4921]: I1210 12:57:27.369704 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:27 crc kubenswrapper[4921]: I1210 12:57:27.369714 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:27 crc kubenswrapper[4921]: I1210 12:57:27.369733 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:27 crc kubenswrapper[4921]: I1210 12:57:27.369746 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:27Z","lastTransitionTime":"2025-12-10T12:57:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:27 crc kubenswrapper[4921]: I1210 12:57:27.472595 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:27 crc kubenswrapper[4921]: I1210 12:57:27.472637 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:27 crc kubenswrapper[4921]: I1210 12:57:27.472646 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:27 crc kubenswrapper[4921]: I1210 12:57:27.472661 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:27 crc kubenswrapper[4921]: I1210 12:57:27.472674 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:27Z","lastTransitionTime":"2025-12-10T12:57:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:27 crc kubenswrapper[4921]: I1210 12:57:27.575291 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:27 crc kubenswrapper[4921]: I1210 12:57:27.575351 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:27 crc kubenswrapper[4921]: I1210 12:57:27.575365 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:27 crc kubenswrapper[4921]: I1210 12:57:27.575410 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:27 crc kubenswrapper[4921]: I1210 12:57:27.575432 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:27Z","lastTransitionTime":"2025-12-10T12:57:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:27 crc kubenswrapper[4921]: I1210 12:57:27.678410 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:27 crc kubenswrapper[4921]: I1210 12:57:27.678458 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:27 crc kubenswrapper[4921]: I1210 12:57:27.678473 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:27 crc kubenswrapper[4921]: I1210 12:57:27.678494 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:27 crc kubenswrapper[4921]: I1210 12:57:27.678513 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:27Z","lastTransitionTime":"2025-12-10T12:57:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:27 crc kubenswrapper[4921]: I1210 12:57:27.781024 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:27 crc kubenswrapper[4921]: I1210 12:57:27.781081 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:27 crc kubenswrapper[4921]: I1210 12:57:27.781099 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:27 crc kubenswrapper[4921]: I1210 12:57:27.781124 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:27 crc kubenswrapper[4921]: I1210 12:57:27.781145 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:27Z","lastTransitionTime":"2025-12-10T12:57:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:27 crc kubenswrapper[4921]: I1210 12:57:27.884193 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:27 crc kubenswrapper[4921]: I1210 12:57:27.884250 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:27 crc kubenswrapper[4921]: I1210 12:57:27.884263 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:27 crc kubenswrapper[4921]: I1210 12:57:27.884321 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:27 crc kubenswrapper[4921]: I1210 12:57:27.884337 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:27Z","lastTransitionTime":"2025-12-10T12:57:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:27 crc kubenswrapper[4921]: I1210 12:57:27.987741 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:27 crc kubenswrapper[4921]: I1210 12:57:27.987807 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:27 crc kubenswrapper[4921]: I1210 12:57:27.987817 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:27 crc kubenswrapper[4921]: I1210 12:57:27.987835 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:27 crc kubenswrapper[4921]: I1210 12:57:27.987856 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:27Z","lastTransitionTime":"2025-12-10T12:57:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:28 crc kubenswrapper[4921]: I1210 12:57:28.091975 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:28 crc kubenswrapper[4921]: I1210 12:57:28.092053 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:28 crc kubenswrapper[4921]: I1210 12:57:28.092076 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:28 crc kubenswrapper[4921]: I1210 12:57:28.092108 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:28 crc kubenswrapper[4921]: I1210 12:57:28.092129 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:28Z","lastTransitionTime":"2025-12-10T12:57:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:28 crc kubenswrapper[4921]: I1210 12:57:28.191885 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j2nnf" Dec 10 12:57:28 crc kubenswrapper[4921]: I1210 12:57:28.191951 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 12:57:28 crc kubenswrapper[4921]: E1210 12:57:28.192073 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-j2nnf" podUID="9cc656f0-ce36-474b-9fa3-1ce9f43675a4" Dec 10 12:57:28 crc kubenswrapper[4921]: E1210 12:57:28.192231 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 12:57:28 crc kubenswrapper[4921]: I1210 12:57:28.192352 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 12:57:28 crc kubenswrapper[4921]: I1210 12:57:28.192365 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 12:57:28 crc kubenswrapper[4921]: E1210 12:57:28.192579 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 12:57:28 crc kubenswrapper[4921]: E1210 12:57:28.193873 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 12:57:28 crc kubenswrapper[4921]: I1210 12:57:28.196170 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:28 crc kubenswrapper[4921]: I1210 12:57:28.196329 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:28 crc kubenswrapper[4921]: I1210 12:57:28.196462 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:28 crc kubenswrapper[4921]: I1210 12:57:28.196567 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:28 crc kubenswrapper[4921]: I1210 12:57:28.196656 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:28Z","lastTransitionTime":"2025-12-10T12:57:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:28 crc kubenswrapper[4921]: I1210 12:57:28.301044 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:28 crc kubenswrapper[4921]: I1210 12:57:28.301142 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:28 crc kubenswrapper[4921]: I1210 12:57:28.301159 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:28 crc kubenswrapper[4921]: I1210 12:57:28.301214 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:28 crc kubenswrapper[4921]: I1210 12:57:28.301233 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:28Z","lastTransitionTime":"2025-12-10T12:57:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:28 crc kubenswrapper[4921]: I1210 12:57:28.405326 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:28 crc kubenswrapper[4921]: I1210 12:57:28.405433 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:28 crc kubenswrapper[4921]: I1210 12:57:28.405451 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:28 crc kubenswrapper[4921]: I1210 12:57:28.405476 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:28 crc kubenswrapper[4921]: I1210 12:57:28.405495 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:28Z","lastTransitionTime":"2025-12-10T12:57:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:28 crc kubenswrapper[4921]: I1210 12:57:28.510198 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:28 crc kubenswrapper[4921]: I1210 12:57:28.510249 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:28 crc kubenswrapper[4921]: I1210 12:57:28.510266 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:28 crc kubenswrapper[4921]: I1210 12:57:28.510290 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:28 crc kubenswrapper[4921]: I1210 12:57:28.510309 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:28Z","lastTransitionTime":"2025-12-10T12:57:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:28 crc kubenswrapper[4921]: I1210 12:57:28.614468 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:28 crc kubenswrapper[4921]: I1210 12:57:28.614521 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:28 crc kubenswrapper[4921]: I1210 12:57:28.614539 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:28 crc kubenswrapper[4921]: I1210 12:57:28.614566 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:28 crc kubenswrapper[4921]: I1210 12:57:28.614584 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:28Z","lastTransitionTime":"2025-12-10T12:57:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:28 crc kubenswrapper[4921]: I1210 12:57:28.717812 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:28 crc kubenswrapper[4921]: I1210 12:57:28.717910 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:28 crc kubenswrapper[4921]: I1210 12:57:28.717925 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:28 crc kubenswrapper[4921]: I1210 12:57:28.717947 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:28 crc kubenswrapper[4921]: I1210 12:57:28.717962 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:28Z","lastTransitionTime":"2025-12-10T12:57:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:28 crc kubenswrapper[4921]: I1210 12:57:28.821188 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:28 crc kubenswrapper[4921]: I1210 12:57:28.821250 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:28 crc kubenswrapper[4921]: I1210 12:57:28.821271 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:28 crc kubenswrapper[4921]: I1210 12:57:28.821304 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:28 crc kubenswrapper[4921]: I1210 12:57:28.821330 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:28Z","lastTransitionTime":"2025-12-10T12:57:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:28 crc kubenswrapper[4921]: I1210 12:57:28.925607 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:28 crc kubenswrapper[4921]: I1210 12:57:28.925681 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:28 crc kubenswrapper[4921]: I1210 12:57:28.925699 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:28 crc kubenswrapper[4921]: I1210 12:57:28.925725 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:28 crc kubenswrapper[4921]: I1210 12:57:28.925743 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:28Z","lastTransitionTime":"2025-12-10T12:57:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:29 crc kubenswrapper[4921]: I1210 12:57:29.028868 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:29 crc kubenswrapper[4921]: I1210 12:57:29.028922 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:29 crc kubenswrapper[4921]: I1210 12:57:29.028940 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:29 crc kubenswrapper[4921]: I1210 12:57:29.028965 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:29 crc kubenswrapper[4921]: I1210 12:57:29.028984 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:29Z","lastTransitionTime":"2025-12-10T12:57:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:29 crc kubenswrapper[4921]: I1210 12:57:29.132579 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:29 crc kubenswrapper[4921]: I1210 12:57:29.132642 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:29 crc kubenswrapper[4921]: I1210 12:57:29.132653 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:29 crc kubenswrapper[4921]: I1210 12:57:29.132681 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:29 crc kubenswrapper[4921]: I1210 12:57:29.132694 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:29Z","lastTransitionTime":"2025-12-10T12:57:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:29 crc kubenswrapper[4921]: I1210 12:57:29.235764 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:29 crc kubenswrapper[4921]: I1210 12:57:29.235859 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:29 crc kubenswrapper[4921]: I1210 12:57:29.235879 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:29 crc kubenswrapper[4921]: I1210 12:57:29.235912 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:29 crc kubenswrapper[4921]: I1210 12:57:29.235934 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:29Z","lastTransitionTime":"2025-12-10T12:57:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:29 crc kubenswrapper[4921]: I1210 12:57:29.338973 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:29 crc kubenswrapper[4921]: I1210 12:57:29.339026 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:29 crc kubenswrapper[4921]: I1210 12:57:29.339040 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:29 crc kubenswrapper[4921]: I1210 12:57:29.339064 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:29 crc kubenswrapper[4921]: I1210 12:57:29.339089 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:29Z","lastTransitionTime":"2025-12-10T12:57:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:29 crc kubenswrapper[4921]: I1210 12:57:29.441805 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:29 crc kubenswrapper[4921]: I1210 12:57:29.441862 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:29 crc kubenswrapper[4921]: I1210 12:57:29.441875 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:29 crc kubenswrapper[4921]: I1210 12:57:29.441898 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:29 crc kubenswrapper[4921]: I1210 12:57:29.441912 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:29Z","lastTransitionTime":"2025-12-10T12:57:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:29 crc kubenswrapper[4921]: I1210 12:57:29.545723 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:29 crc kubenswrapper[4921]: I1210 12:57:29.545773 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:29 crc kubenswrapper[4921]: I1210 12:57:29.545786 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:29 crc kubenswrapper[4921]: I1210 12:57:29.545834 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:29 crc kubenswrapper[4921]: I1210 12:57:29.545864 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:29Z","lastTransitionTime":"2025-12-10T12:57:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:29 crc kubenswrapper[4921]: I1210 12:57:29.648822 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:29 crc kubenswrapper[4921]: I1210 12:57:29.648868 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:29 crc kubenswrapper[4921]: I1210 12:57:29.648878 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:29 crc kubenswrapper[4921]: I1210 12:57:29.648896 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:29 crc kubenswrapper[4921]: I1210 12:57:29.648909 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:29Z","lastTransitionTime":"2025-12-10T12:57:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:29 crc kubenswrapper[4921]: I1210 12:57:29.751640 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:29 crc kubenswrapper[4921]: I1210 12:57:29.751683 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:29 crc kubenswrapper[4921]: I1210 12:57:29.751691 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:29 crc kubenswrapper[4921]: I1210 12:57:29.751705 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:29 crc kubenswrapper[4921]: I1210 12:57:29.751715 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:29Z","lastTransitionTime":"2025-12-10T12:57:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:29 crc kubenswrapper[4921]: I1210 12:57:29.854507 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:29 crc kubenswrapper[4921]: I1210 12:57:29.854567 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:29 crc kubenswrapper[4921]: I1210 12:57:29.854578 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:29 crc kubenswrapper[4921]: I1210 12:57:29.854596 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:29 crc kubenswrapper[4921]: I1210 12:57:29.854607 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:29Z","lastTransitionTime":"2025-12-10T12:57:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:29 crc kubenswrapper[4921]: I1210 12:57:29.957652 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:29 crc kubenswrapper[4921]: I1210 12:57:29.957704 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:29 crc kubenswrapper[4921]: I1210 12:57:29.957717 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:29 crc kubenswrapper[4921]: I1210 12:57:29.957737 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:29 crc kubenswrapper[4921]: I1210 12:57:29.957751 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:29Z","lastTransitionTime":"2025-12-10T12:57:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:30 crc kubenswrapper[4921]: I1210 12:57:30.061297 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:30 crc kubenswrapper[4921]: I1210 12:57:30.061358 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:30 crc kubenswrapper[4921]: I1210 12:57:30.061371 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:30 crc kubenswrapper[4921]: I1210 12:57:30.061413 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:30 crc kubenswrapper[4921]: I1210 12:57:30.061428 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:30Z","lastTransitionTime":"2025-12-10T12:57:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:30 crc kubenswrapper[4921]: I1210 12:57:30.165572 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:30 crc kubenswrapper[4921]: I1210 12:57:30.165650 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:30 crc kubenswrapper[4921]: I1210 12:57:30.165666 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:30 crc kubenswrapper[4921]: I1210 12:57:30.165686 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:30 crc kubenswrapper[4921]: I1210 12:57:30.165703 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:30Z","lastTransitionTime":"2025-12-10T12:57:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:30 crc kubenswrapper[4921]: I1210 12:57:30.191983 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j2nnf" Dec 10 12:57:30 crc kubenswrapper[4921]: I1210 12:57:30.192062 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 12:57:30 crc kubenswrapper[4921]: I1210 12:57:30.192110 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 12:57:30 crc kubenswrapper[4921]: I1210 12:57:30.192259 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 12:57:30 crc kubenswrapper[4921]: E1210 12:57:30.192442 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j2nnf" podUID="9cc656f0-ce36-474b-9fa3-1ce9f43675a4" Dec 10 12:57:30 crc kubenswrapper[4921]: E1210 12:57:30.192687 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 12:57:30 crc kubenswrapper[4921]: E1210 12:57:30.192831 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 12:57:30 crc kubenswrapper[4921]: E1210 12:57:30.193017 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 12:57:30 crc kubenswrapper[4921]: I1210 12:57:30.238937 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9cc656f0-ce36-474b-9fa3-1ce9f43675a4-metrics-certs\") pod \"network-metrics-daemon-j2nnf\" (UID: \"9cc656f0-ce36-474b-9fa3-1ce9f43675a4\") " pod="openshift-multus/network-metrics-daemon-j2nnf" Dec 10 12:57:30 crc kubenswrapper[4921]: E1210 12:57:30.239164 4921 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 10 12:57:30 crc kubenswrapper[4921]: E1210 12:57:30.239338 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9cc656f0-ce36-474b-9fa3-1ce9f43675a4-metrics-certs podName:9cc656f0-ce36-474b-9fa3-1ce9f43675a4 nodeName:}" failed. No retries permitted until 2025-12-10 12:57:38.239296778 +0000 UTC m=+55.455518862 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/9cc656f0-ce36-474b-9fa3-1ce9f43675a4-metrics-certs") pod "network-metrics-daemon-j2nnf" (UID: "9cc656f0-ce36-474b-9fa3-1ce9f43675a4") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 10 12:57:30 crc kubenswrapper[4921]: I1210 12:57:30.268709 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:30 crc kubenswrapper[4921]: I1210 12:57:30.268747 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:30 crc kubenswrapper[4921]: I1210 12:57:30.268759 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:30 crc kubenswrapper[4921]: I1210 12:57:30.268784 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:30 crc kubenswrapper[4921]: I1210 12:57:30.268799 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:30Z","lastTransitionTime":"2025-12-10T12:57:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:30 crc kubenswrapper[4921]: I1210 12:57:30.372017 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:30 crc kubenswrapper[4921]: I1210 12:57:30.372097 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:30 crc kubenswrapper[4921]: I1210 12:57:30.372118 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:30 crc kubenswrapper[4921]: I1210 12:57:30.372147 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:30 crc kubenswrapper[4921]: I1210 12:57:30.372167 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:30Z","lastTransitionTime":"2025-12-10T12:57:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:30 crc kubenswrapper[4921]: I1210 12:57:30.475930 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:30 crc kubenswrapper[4921]: I1210 12:57:30.476010 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:30 crc kubenswrapper[4921]: I1210 12:57:30.476028 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:30 crc kubenswrapper[4921]: I1210 12:57:30.476069 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:30 crc kubenswrapper[4921]: I1210 12:57:30.476084 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:30Z","lastTransitionTime":"2025-12-10T12:57:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:30 crc kubenswrapper[4921]: I1210 12:57:30.580314 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:30 crc kubenswrapper[4921]: I1210 12:57:30.580430 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:30 crc kubenswrapper[4921]: I1210 12:57:30.580454 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:30 crc kubenswrapper[4921]: I1210 12:57:30.580484 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:30 crc kubenswrapper[4921]: I1210 12:57:30.580501 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:30Z","lastTransitionTime":"2025-12-10T12:57:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:30 crc kubenswrapper[4921]: I1210 12:57:30.684285 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:30 crc kubenswrapper[4921]: I1210 12:57:30.684367 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:30 crc kubenswrapper[4921]: I1210 12:57:30.684418 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:30 crc kubenswrapper[4921]: I1210 12:57:30.684450 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:30 crc kubenswrapper[4921]: I1210 12:57:30.684471 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:30Z","lastTransitionTime":"2025-12-10T12:57:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:30 crc kubenswrapper[4921]: I1210 12:57:30.789218 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:30 crc kubenswrapper[4921]: I1210 12:57:30.789309 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:30 crc kubenswrapper[4921]: I1210 12:57:30.789337 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:30 crc kubenswrapper[4921]: I1210 12:57:30.789368 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:30 crc kubenswrapper[4921]: I1210 12:57:30.789431 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:30Z","lastTransitionTime":"2025-12-10T12:57:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:30 crc kubenswrapper[4921]: I1210 12:57:30.893689 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:30 crc kubenswrapper[4921]: I1210 12:57:30.893775 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:30 crc kubenswrapper[4921]: I1210 12:57:30.893791 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:30 crc kubenswrapper[4921]: I1210 12:57:30.893819 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:30 crc kubenswrapper[4921]: I1210 12:57:30.893992 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:30Z","lastTransitionTime":"2025-12-10T12:57:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:30 crc kubenswrapper[4921]: I1210 12:57:30.997348 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:30 crc kubenswrapper[4921]: I1210 12:57:30.997413 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:30 crc kubenswrapper[4921]: I1210 12:57:30.997425 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:30 crc kubenswrapper[4921]: I1210 12:57:30.997448 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:30 crc kubenswrapper[4921]: I1210 12:57:30.997462 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:30Z","lastTransitionTime":"2025-12-10T12:57:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:31 crc kubenswrapper[4921]: I1210 12:57:31.101321 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:31 crc kubenswrapper[4921]: I1210 12:57:31.101368 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:31 crc kubenswrapper[4921]: I1210 12:57:31.101381 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:31 crc kubenswrapper[4921]: I1210 12:57:31.101417 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:31 crc kubenswrapper[4921]: I1210 12:57:31.101433 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:31Z","lastTransitionTime":"2025-12-10T12:57:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:31 crc kubenswrapper[4921]: I1210 12:57:31.203741 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:31 crc kubenswrapper[4921]: I1210 12:57:31.203791 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:31 crc kubenswrapper[4921]: I1210 12:57:31.203803 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:31 crc kubenswrapper[4921]: I1210 12:57:31.204005 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:31 crc kubenswrapper[4921]: I1210 12:57:31.204021 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:31Z","lastTransitionTime":"2025-12-10T12:57:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:31 crc kubenswrapper[4921]: I1210 12:57:31.307672 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:31 crc kubenswrapper[4921]: I1210 12:57:31.307767 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:31 crc kubenswrapper[4921]: I1210 12:57:31.307786 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:31 crc kubenswrapper[4921]: I1210 12:57:31.307848 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:31 crc kubenswrapper[4921]: I1210 12:57:31.307874 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:31Z","lastTransitionTime":"2025-12-10T12:57:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:31 crc kubenswrapper[4921]: I1210 12:57:31.412098 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:31 crc kubenswrapper[4921]: I1210 12:57:31.412240 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:31 crc kubenswrapper[4921]: I1210 12:57:31.412259 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:31 crc kubenswrapper[4921]: I1210 12:57:31.412289 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:31 crc kubenswrapper[4921]: I1210 12:57:31.412311 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:31Z","lastTransitionTime":"2025-12-10T12:57:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:31 crc kubenswrapper[4921]: I1210 12:57:31.516188 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:31 crc kubenswrapper[4921]: I1210 12:57:31.516266 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:31 crc kubenswrapper[4921]: I1210 12:57:31.516284 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:31 crc kubenswrapper[4921]: I1210 12:57:31.516312 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:31 crc kubenswrapper[4921]: I1210 12:57:31.516333 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:31Z","lastTransitionTime":"2025-12-10T12:57:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:31 crc kubenswrapper[4921]: I1210 12:57:31.619494 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:31 crc kubenswrapper[4921]: I1210 12:57:31.619562 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:31 crc kubenswrapper[4921]: I1210 12:57:31.619585 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:31 crc kubenswrapper[4921]: I1210 12:57:31.619615 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:31 crc kubenswrapper[4921]: I1210 12:57:31.619637 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:31Z","lastTransitionTime":"2025-12-10T12:57:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:31 crc kubenswrapper[4921]: I1210 12:57:31.722774 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:31 crc kubenswrapper[4921]: I1210 12:57:31.722851 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:31 crc kubenswrapper[4921]: I1210 12:57:31.722874 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:31 crc kubenswrapper[4921]: I1210 12:57:31.722907 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:31 crc kubenswrapper[4921]: I1210 12:57:31.722934 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:31Z","lastTransitionTime":"2025-12-10T12:57:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:31 crc kubenswrapper[4921]: I1210 12:57:31.826420 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:31 crc kubenswrapper[4921]: I1210 12:57:31.826478 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:31 crc kubenswrapper[4921]: I1210 12:57:31.826494 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:31 crc kubenswrapper[4921]: I1210 12:57:31.826516 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:31 crc kubenswrapper[4921]: I1210 12:57:31.826530 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:31Z","lastTransitionTime":"2025-12-10T12:57:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:31 crc kubenswrapper[4921]: I1210 12:57:31.930127 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:31 crc kubenswrapper[4921]: I1210 12:57:31.930189 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:31 crc kubenswrapper[4921]: I1210 12:57:31.930207 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:31 crc kubenswrapper[4921]: I1210 12:57:31.930235 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:31 crc kubenswrapper[4921]: I1210 12:57:31.930255 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:31Z","lastTransitionTime":"2025-12-10T12:57:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:32 crc kubenswrapper[4921]: I1210 12:57:32.033524 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:32 crc kubenswrapper[4921]: I1210 12:57:32.033578 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:32 crc kubenswrapper[4921]: I1210 12:57:32.033595 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:32 crc kubenswrapper[4921]: I1210 12:57:32.033618 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:32 crc kubenswrapper[4921]: I1210 12:57:32.033635 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:32Z","lastTransitionTime":"2025-12-10T12:57:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:32 crc kubenswrapper[4921]: I1210 12:57:32.136478 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:32 crc kubenswrapper[4921]: I1210 12:57:32.136621 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:32 crc kubenswrapper[4921]: I1210 12:57:32.136646 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:32 crc kubenswrapper[4921]: I1210 12:57:32.136676 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:32 crc kubenswrapper[4921]: I1210 12:57:32.136695 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:32Z","lastTransitionTime":"2025-12-10T12:57:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:32 crc kubenswrapper[4921]: I1210 12:57:32.192686 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 12:57:32 crc kubenswrapper[4921]: I1210 12:57:32.192900 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 12:57:32 crc kubenswrapper[4921]: I1210 12:57:32.193020 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j2nnf" Dec 10 12:57:32 crc kubenswrapper[4921]: E1210 12:57:32.193016 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 12:57:32 crc kubenswrapper[4921]: I1210 12:57:32.193096 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 12:57:32 crc kubenswrapper[4921]: E1210 12:57:32.193298 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 12:57:32 crc kubenswrapper[4921]: E1210 12:57:32.193476 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j2nnf" podUID="9cc656f0-ce36-474b-9fa3-1ce9f43675a4" Dec 10 12:57:32 crc kubenswrapper[4921]: E1210 12:57:32.193561 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 12:57:32 crc kubenswrapper[4921]: I1210 12:57:32.240620 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:32 crc kubenswrapper[4921]: I1210 12:57:32.240687 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:32 crc kubenswrapper[4921]: I1210 12:57:32.240707 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:32 crc kubenswrapper[4921]: I1210 12:57:32.240734 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:32 crc kubenswrapper[4921]: I1210 12:57:32.240752 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:32Z","lastTransitionTime":"2025-12-10T12:57:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:32 crc kubenswrapper[4921]: I1210 12:57:32.344085 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:32 crc kubenswrapper[4921]: I1210 12:57:32.344140 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:32 crc kubenswrapper[4921]: I1210 12:57:32.344155 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:32 crc kubenswrapper[4921]: I1210 12:57:32.344175 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:32 crc kubenswrapper[4921]: I1210 12:57:32.344188 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:32Z","lastTransitionTime":"2025-12-10T12:57:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:32 crc kubenswrapper[4921]: I1210 12:57:32.447283 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:32 crc kubenswrapper[4921]: I1210 12:57:32.447333 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:32 crc kubenswrapper[4921]: I1210 12:57:32.447345 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:32 crc kubenswrapper[4921]: I1210 12:57:32.447362 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:32 crc kubenswrapper[4921]: I1210 12:57:32.447374 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:32Z","lastTransitionTime":"2025-12-10T12:57:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:32 crc kubenswrapper[4921]: I1210 12:57:32.550926 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:32 crc kubenswrapper[4921]: I1210 12:57:32.550975 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:32 crc kubenswrapper[4921]: I1210 12:57:32.550984 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:32 crc kubenswrapper[4921]: I1210 12:57:32.551001 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:32 crc kubenswrapper[4921]: I1210 12:57:32.551014 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:32Z","lastTransitionTime":"2025-12-10T12:57:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:32 crc kubenswrapper[4921]: I1210 12:57:32.654315 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:32 crc kubenswrapper[4921]: I1210 12:57:32.654409 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:32 crc kubenswrapper[4921]: I1210 12:57:32.654424 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:32 crc kubenswrapper[4921]: I1210 12:57:32.654443 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:32 crc kubenswrapper[4921]: I1210 12:57:32.654459 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:32Z","lastTransitionTime":"2025-12-10T12:57:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:32 crc kubenswrapper[4921]: I1210 12:57:32.757362 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:32 crc kubenswrapper[4921]: I1210 12:57:32.757422 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:32 crc kubenswrapper[4921]: I1210 12:57:32.757435 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:32 crc kubenswrapper[4921]: I1210 12:57:32.757456 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:32 crc kubenswrapper[4921]: I1210 12:57:32.757466 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:32Z","lastTransitionTime":"2025-12-10T12:57:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:32 crc kubenswrapper[4921]: I1210 12:57:32.860440 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:32 crc kubenswrapper[4921]: I1210 12:57:32.860480 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:32 crc kubenswrapper[4921]: I1210 12:57:32.860489 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:32 crc kubenswrapper[4921]: I1210 12:57:32.860503 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:32 crc kubenswrapper[4921]: I1210 12:57:32.860515 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:32Z","lastTransitionTime":"2025-12-10T12:57:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:32 crc kubenswrapper[4921]: I1210 12:57:32.964452 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:32 crc kubenswrapper[4921]: I1210 12:57:32.964502 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:32 crc kubenswrapper[4921]: I1210 12:57:32.964512 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:32 crc kubenswrapper[4921]: I1210 12:57:32.964533 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:32 crc kubenswrapper[4921]: I1210 12:57:32.964545 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:32Z","lastTransitionTime":"2025-12-10T12:57:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:33 crc kubenswrapper[4921]: I1210 12:57:33.067449 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:33 crc kubenswrapper[4921]: I1210 12:57:33.067495 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:33 crc kubenswrapper[4921]: I1210 12:57:33.067510 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:33 crc kubenswrapper[4921]: I1210 12:57:33.067531 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:33 crc kubenswrapper[4921]: I1210 12:57:33.067547 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:33Z","lastTransitionTime":"2025-12-10T12:57:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:33 crc kubenswrapper[4921]: I1210 12:57:33.170918 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:33 crc kubenswrapper[4921]: I1210 12:57:33.170977 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:33 crc kubenswrapper[4921]: I1210 12:57:33.170986 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:33 crc kubenswrapper[4921]: I1210 12:57:33.171006 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:33 crc kubenswrapper[4921]: I1210 12:57:33.171018 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:33Z","lastTransitionTime":"2025-12-10T12:57:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:33 crc kubenswrapper[4921]: I1210 12:57:33.206229 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bd245e67c99943297f64701eba8772143dc206caf67849eaf2f9a8e82dab0d26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:33Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:33 crc kubenswrapper[4921]: I1210 12:57:33.216153 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jskgz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ad003cc-9fcc-4fc6-86b9-247b30013c0a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8c0dc3ea5672198c430f12ce59b7f2a66100fe52e0f7b4552deba97144250d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m875h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:11Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jskgz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:33Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:33 crc kubenswrapper[4921]: I1210 12:57:33.225845 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zmks6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f2626c5-78df-45d2-8970-c4f99790a0fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d43ebe41a779225842dfa1c4d3be01575113b67ada9be07f553df1514e9dcf85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ft9kj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zmks6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:33Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:33 crc kubenswrapper[4921]: I1210 12:57:33.237033 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pqlx4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://480da3b2621712c4562f9423dc98fdbf17a9dc45365f129777611bc7e934c709\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lhs2m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pqlx4\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:33Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:33 crc kubenswrapper[4921]: I1210 12:57:33.255593 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50684108-04fc-405c-82be-d21d16cd650b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8fd269a96475df9dccf2f7bd0ffae831f397f49232f5c22df67903b9b8b8161e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a88b1b9101bc4ab339d394df337e4e11ec8af98b44b621bcb84eed1a0fba3d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://933c0c81aa0aa2d676a6e404f883a7c81240ef7b07a2e794878c85994d0eb88f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f8f888f214898cb28563da7a77267781622df1f2231c27d1fbdee617ada1ec2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27956424405bdf6223a96b8fd91b5152276a1501c3de2e07dfafc8b3329a6063\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59a0f
3962237d723e5aa9044de1ddce3673ae1fb4c9e5e0478cd41daa661f6d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7cae279c3cb664f0de50f9e6f8e88378d8878555863051da2626693c4337cff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bf05f49208bee2d3d43564c1f88b49d4481b48bbb9afe2108c1eea92e4531305\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-10T12:57:19Z\\\",\\\"message\\\":\\\":57:18.800406 6129 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1210 12:57:18.800433 6129 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1210 12:57:18.800441 6129 factory.go:656] Stopping watch factory\\\\nI1210 12:57:18.800465 6129 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1210 12:57:18.800466 6129 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1210 12:57:18.800662 6129 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1210 12:57:18.801063 6129 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1210 12:57:18.801086 6129 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1210 12:57:18.801163 6129 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1210 12:57:18.801631 6129 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1210 12:57:18.801721 6129 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1210 12:57:18.801837 6129 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from 
k8s.io/client-go/informers/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:15Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7cae279c3cb664f0de50f9e6f8e88378d8878555863051da2626693c4337cff\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-10T12:57:23Z\\\",\\\"message\\\":\\\"be-controller-manager/kube-controller-manager-crc openshift-ovn-kubernetes/ovnkube-node-m7n89 openshift-dns/node-resolver-zmks6 openshift-etcd/etcd-crc openshift-multus/multus-pqlx4 openshift-network-node-identity/network-node-identity-vrzqb]\\\\nI1210 12:57:21.359087 6291 obj_retry.go:418] Waiting for all the *v1.Pod retry setup to complete in iterateRetryResources\\\\nI1210 12:57:21.359102 6291 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nI1210 12:57:21.359112 6291 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nI1210 12:57:21.359124 6291 ovn.go:134] Ensuring zone local for Pod openshift-network-node-identity/network-node-identity-vrzqb in node crc\\\\nI1210 12:57:21.359130 6291 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb after 0 failed attempt(s)\\\\nI1210 12:57:21.359134 6291 default_network_controller.go:776] Recording success event on pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nI1210 12:57:21.359147 6291 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1210 12:57:21.359215 6291 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to 
create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd0025f5be6e68aba73c349dd732281dead920b7d8c2d307b4a67cfdafb99119\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34932b230bb26e6c4b1bdf433827ce608df8658f6fb76140a4f0ac680dc1d43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d
1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://34932b230bb26e6c4b1bdf433827ce608df8658f6fb76140a4f0ac680dc1d43f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-m7n89\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:33Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:33 crc kubenswrapper[4921]: I1210 12:57:33.268593 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wwrv2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6b3380f-1dd4-45de-9c44-eaa37f965801\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://18de9b421542cbc73b0797f1d6e6e6752b88c3f802e5f2fd16d303de041ac72a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqtvp\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49ddf0d56e11ffafc30ec8b0065dd6ef3c3decdbf696e169013572c830f6557c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqtvp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:21Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wwrv2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:33Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:33 crc kubenswrapper[4921]: I1210 12:57:33.273601 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:33 crc kubenswrapper[4921]: I1210 12:57:33.273646 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:33 crc kubenswrapper[4921]: I1210 12:57:33.273661 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:33 crc kubenswrapper[4921]: I1210 12:57:33.273683 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:33 crc kubenswrapper[4921]: I1210 12:57:33.273698 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:33Z","lastTransitionTime":"2025-12-10T12:57:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:33 crc kubenswrapper[4921]: I1210 12:57:33.287246 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-j2nnf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9cc656f0-ce36-474b-9fa3-1ce9f43675a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9vnm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9vnm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:22Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-j2nnf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:33Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:33 crc kubenswrapper[4921]: I1210 12:57:33.302904 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2789e9cd1bca4abecf0939aad4a5f63bdc250a525ad3664bc2440e8b0b7a834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:33Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:33 crc kubenswrapper[4921]: I1210 12:57:33.317108 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"02726135-3050-46a1-a3ab-b2ce46cdb75d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12292f0529bcf32fb33e5accfbd0dfd7d53e377a9ee2046d4ca6efc78fe1c31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a4716beddbcd24e8418830aa5494cffffc21272e45e30bd15cfe58bfc07c543\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f66fe2144cde40619405c04d7d83cbcc2e78503401df428502abad1682d4cb7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4244835c7f038a7c1bf4820de49854350a23fac13c5a252a1553f6508594f10e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:33Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:33 crc kubenswrapper[4921]: I1210 12:57:33.330276 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:33Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:33 crc kubenswrapper[4921]: I1210 12:57:33.343477 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://307b845aae3352df08e2f9fd394f4110a37b2a21650593ebb584c5bf37d01397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3be8a498516e12174c8b5612669fd69deef610c01ed9884a5228cd436bbae3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:33Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:33 crc kubenswrapper[4921]: I1210 12:57:33.364172 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-86bpd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"709b4982-f2e6-4692-ab1a-c1d5b7d507ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c842f48ca574d23a086e1b248c17102895f4f45897ac87ddcc1f98f170a22bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34d9e720fab0818e4cdf1e2a4da042a5648c7c396fedf17b395ad07ececd5c9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://34d9e720fab0818e4cdf1e2a4da042a5648c7c396fedf17b395ad07ececd5c9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"start
edAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04723fc3840c9d632dae527a5afa04fc7eea858426056da3dfe8e72186198ab1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://04723fc3840c9d632dae527a5afa04fc7eea858426056da3dfe8e72186198ab1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://adf25ba213f519cad3c21233c0f3d2a383d978543da8ea1db41bb60dd29f9f3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://adf25ba213f519cad3c21233c0f3d2a383d978543da8ea1db41bb60dd29f9f3e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://baa63608618bc4f059414317df70f14a33321d5aed291adc02a9daac92cf5428\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\"
:\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baa63608618bc4f059414317df70f14a33321d5aed291adc02a9daac92cf5428\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e54a218f367591b87841a39399d5889344b8b92fcc70d77105a0191d3dba37c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e54a218f367591b87841a39399d5889344b8b92fcc70d77105a0191d3dba37c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d90adbff8edcd85eebe4858e412769dff7a05b05bbe7fc533906b55e6ee415e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d90adbff8edcd85eebe4858e412769dff7a05b05bbe7fc533906b55e6ee415e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.1
68.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-86bpd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:33Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:33 crc kubenswrapper[4921]: I1210 12:57:33.376387 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:33 crc kubenswrapper[4921]: I1210 12:57:33.376466 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:33 crc kubenswrapper[4921]: I1210 12:57:33.376480 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:33 crc kubenswrapper[4921]: I1210 12:57:33.376501 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:33 crc kubenswrapper[4921]: I1210 12:57:33.376519 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:33Z","lastTransitionTime":"2025-12-10T12:57:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:33 crc kubenswrapper[4921]: I1210 12:57:33.388009 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"371fafdc-aa16-4608-aaa2-e419c4ddbc18\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b9a190a657ca03f3fb08626b7af512164ff131b1783b903a02005a111a7036c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57991b0cb6fd4b37082ff5d4eecc6227d77f241e9a983cd3e0eb9db5b485865f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c24d974446ee70bf587bf3969542cda98f062a9cc78b6af73005d9b8d0a6ee02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5a3f231014293fc0412e577cf9840f62f8db86
9ea4f0f8bef1bfc5112b38cf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://17a6158acd097054719316d2ad29dc036546d3951bb1e8dd010618f9155270a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://534968b5f5d9e7b3063c91a3e0b68ba04d83e2cb65ab688b23d284adc6852155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://534968b5f5d9e7b3063c91a3e0b68ba04d83e2cb65ab688b23d284adc6852155\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0acb3ca5fa3945c89412f466b00193354c94ce56dbba608c104d3baf555a2c3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0acb3ca5fa3945c89412f466b00193354c94ce56dbba608c104d3baf555a2c3a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b7d1b714acf0f278cc0310204225d417266a241f1ea827dc625f7b89a7d0ebac\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b7d1b714acf0f278cc0310204225d417266a241f1ea827dc625f7b89a7d0ebac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:33Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:33 crc kubenswrapper[4921]: I1210 12:57:33.404469 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:33Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:33 crc kubenswrapper[4921]: I1210 12:57:33.417124 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"354355f7-6630-49a8-bdc5-5e875feecb7f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22c45fd7d4d0bb91e995e76a0d813660f9b488a4765e3a21eab2485e1ff03ff3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dbm9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27975eaa70887a1e6ec3bc21ce170bbe5dfe5a05172264be8c8bd343aea02998\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dbm9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-vn2n6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:33Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:33 crc kubenswrapper[4921]: I1210 12:57:33.432933 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f57208b0-80bc-4c1b-bbab-9d2f858972f6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0534394a39803e8a7555e29d0770b5ac7f9197a5f0e03bec4c5460d77fffdd14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6eaca0cb438e61f0856ed7dc64256ccd02aee8dac014d1f5e9cd8aa180c736fb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6
355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://692a4c4828dc74b1bfb948f58fab96ee6674030cb9009c72f30f9eae482eb682\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f534d6390920d177e185001b28f7ece42d82a0da922b4aaf174c271dbe975c50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b39874b20cdccc7903753342421a1f7e13b7e99a2cb699a7c0e44226aebd4f4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"message\\\":\\\"et denominator for mutating requests\\\\\\\" limit=200\\\\nI1210 12:57:01.294872 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1210 12:57:01.294893 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1210 12:57:01.294918 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 12:57:01.294926 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 12:57:01.294932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1210 12:57:01.294934 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1210 12:57:01.294938 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1210 12:57:01.294941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1210 12:57:01.301734 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2446357718/tls.crt::/tmp/serving-cert-2446357718/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1765371405\\\\\\\\\\\\\\\" (2025-12-10 12:56:44 +0000 UTC to 2026-01-09 12:56:45 +0000 UTC (now=2025-12-10 12:57:01.30169166 +0000 UTC))\\\\\\\"\\\\nI1210 12:57:01.301889 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1765371416\\\\\\\\\\\\\\\" 
[serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1765371416\\\\\\\\\\\\\\\" (2025-12-10 11:56:55 +0000 UTC to 2026-12-10 11:56:55 +0000 UTC (now=2025-12-10 12:57:01.301865574 +0000 UTC))\\\\\\\"\\\\nI1210 12:57:01.301907 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1210 12:57:01.301934 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nF1210 12:57:01.302850 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e5afbcb1ea81c3f9ec4152ef614a3f07ba1ded75c774c467e968f9c3ee72e33\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bee74fc4c681cc10c5a460c807659272e393e19173109e82ef65371c5b363ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bee74fc4c681cc10c5a460c807659272e393e19173109e82ef65371c5b363ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:33Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:33 crc kubenswrapper[4921]: I1210 12:57:33.449199 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:33Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:33 crc kubenswrapper[4921]: I1210 12:57:33.482816 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:33 crc kubenswrapper[4921]: I1210 12:57:33.482915 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:33 crc kubenswrapper[4921]: I1210 12:57:33.482935 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:33 crc kubenswrapper[4921]: I1210 12:57:33.482967 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:33 crc kubenswrapper[4921]: I1210 12:57:33.482999 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:33Z","lastTransitionTime":"2025-12-10T12:57:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:33 crc kubenswrapper[4921]: I1210 12:57:33.585349 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:33 crc kubenswrapper[4921]: I1210 12:57:33.585484 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:33 crc kubenswrapper[4921]: I1210 12:57:33.585499 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:33 crc kubenswrapper[4921]: I1210 12:57:33.585526 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:33 crc kubenswrapper[4921]: I1210 12:57:33.585542 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:33Z","lastTransitionTime":"2025-12-10T12:57:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:33 crc kubenswrapper[4921]: I1210 12:57:33.688464 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:33 crc kubenswrapper[4921]: I1210 12:57:33.688543 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:33 crc kubenswrapper[4921]: I1210 12:57:33.688566 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:33 crc kubenswrapper[4921]: I1210 12:57:33.688718 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:33 crc kubenswrapper[4921]: I1210 12:57:33.688750 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:33Z","lastTransitionTime":"2025-12-10T12:57:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:33 crc kubenswrapper[4921]: I1210 12:57:33.791591 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:33 crc kubenswrapper[4921]: I1210 12:57:33.791645 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:33 crc kubenswrapper[4921]: I1210 12:57:33.791656 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:33 crc kubenswrapper[4921]: I1210 12:57:33.791672 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:33 crc kubenswrapper[4921]: I1210 12:57:33.791684 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:33Z","lastTransitionTime":"2025-12-10T12:57:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:33 crc kubenswrapper[4921]: I1210 12:57:33.894642 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:33 crc kubenswrapper[4921]: I1210 12:57:33.894699 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:33 crc kubenswrapper[4921]: I1210 12:57:33.894713 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:33 crc kubenswrapper[4921]: I1210 12:57:33.894734 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:33 crc kubenswrapper[4921]: I1210 12:57:33.894750 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:33Z","lastTransitionTime":"2025-12-10T12:57:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:33 crc kubenswrapper[4921]: I1210 12:57:33.997562 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:33 crc kubenswrapper[4921]: I1210 12:57:33.997615 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:33 crc kubenswrapper[4921]: I1210 12:57:33.997631 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:33 crc kubenswrapper[4921]: I1210 12:57:33.997670 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:33 crc kubenswrapper[4921]: I1210 12:57:33.997689 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:33Z","lastTransitionTime":"2025-12-10T12:57:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:34 crc kubenswrapper[4921]: I1210 12:57:34.087157 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 12:57:34 crc kubenswrapper[4921]: I1210 12:57:34.087293 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 12:57:34 crc kubenswrapper[4921]: I1210 12:57:34.087339 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 12:57:34 crc kubenswrapper[4921]: E1210 12:57:34.087490 4921 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 10 12:57:34 crc kubenswrapper[4921]: E1210 12:57:34.087573 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-10 12:58:06.087552055 +0000 UTC m=+83.303773979 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 10 12:57:34 crc kubenswrapper[4921]: E1210 12:57:34.087825 4921 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 10 12:57:34 crc kubenswrapper[4921]: E1210 12:57:34.087869 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-10 12:58:06.087859063 +0000 UTC m=+83.304080987 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 10 12:57:34 crc kubenswrapper[4921]: E1210 12:57:34.088343 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-12-10 12:58:06.088300594 +0000 UTC m=+83.304522528 (durationBeforeRetry 32s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:57:34 crc kubenswrapper[4921]: I1210 12:57:34.099912 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:34 crc kubenswrapper[4921]: I1210 12:57:34.099946 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:34 crc kubenswrapper[4921]: I1210 12:57:34.099954 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:34 crc kubenswrapper[4921]: I1210 12:57:34.099969 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:34 crc kubenswrapper[4921]: I1210 12:57:34.099980 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:34Z","lastTransitionTime":"2025-12-10T12:57:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:34 crc kubenswrapper[4921]: I1210 12:57:34.187907 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 12:57:34 crc kubenswrapper[4921]: I1210 12:57:34.187963 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 12:57:34 crc kubenswrapper[4921]: E1210 12:57:34.188099 4921 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 10 12:57:34 crc kubenswrapper[4921]: E1210 12:57:34.188120 4921 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 10 12:57:34 crc kubenswrapper[4921]: E1210 12:57:34.188133 4921 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 10 12:57:34 crc kubenswrapper[4921]: E1210 12:57:34.188196 4921 nestedpendingoperations.go:348] 
Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-10 12:58:06.188177586 +0000 UTC m=+83.404399510 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 10 12:57:34 crc kubenswrapper[4921]: E1210 12:57:34.188194 4921 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 10 12:57:34 crc kubenswrapper[4921]: E1210 12:57:34.188244 4921 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 10 12:57:34 crc kubenswrapper[4921]: E1210 12:57:34.188260 4921 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 10 12:57:34 crc kubenswrapper[4921]: E1210 12:57:34.188359 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-10 12:58:06.18833407 +0000 UTC m=+83.404556034 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 10 12:57:34 crc kubenswrapper[4921]: I1210 12:57:34.191785 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j2nnf" Dec 10 12:57:34 crc kubenswrapper[4921]: I1210 12:57:34.191932 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 12:57:34 crc kubenswrapper[4921]: I1210 12:57:34.191960 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 12:57:34 crc kubenswrapper[4921]: I1210 12:57:34.191840 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 12:57:34 crc kubenswrapper[4921]: E1210 12:57:34.192413 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 12:57:34 crc kubenswrapper[4921]: E1210 12:57:34.192793 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 12:57:34 crc kubenswrapper[4921]: E1210 12:57:34.192929 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j2nnf" podUID="9cc656f0-ce36-474b-9fa3-1ce9f43675a4" Dec 10 12:57:34 crc kubenswrapper[4921]: I1210 12:57:34.192886 4921 scope.go:117] "RemoveContainer" containerID="c7cae279c3cb664f0de50f9e6f8e88378d8878555863051da2626693c4337cff" Dec 10 12:57:34 crc kubenswrapper[4921]: E1210 12:57:34.192869 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 12:57:34 crc kubenswrapper[4921]: I1210 12:57:34.201855 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:34 crc kubenswrapper[4921]: I1210 12:57:34.202114 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:34 crc kubenswrapper[4921]: I1210 12:57:34.202153 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:34 crc kubenswrapper[4921]: I1210 12:57:34.202168 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:34 crc kubenswrapper[4921]: I1210 12:57:34.202178 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:34Z","lastTransitionTime":"2025-12-10T12:57:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:34 crc kubenswrapper[4921]: I1210 12:57:34.208072 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jskgz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ad003cc-9fcc-4fc6-86b9-247b30013c0a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8c0dc3ea5672198c430f12ce59b7f2a66100fe52e0f7b4552deba97144250d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m875h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:11Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jskgz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:34Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:34 crc kubenswrapper[4921]: I1210 12:57:34.223227 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bd245e67c99943297f64701eba8772143dc206caf67849eaf2f9a8e82dab0d26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:34Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:34 crc kubenswrapper[4921]: I1210 12:57:34.239687 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2789e9cd1bca4abecf0939aad4a5f63bdc250a525ad3664bc2440e8b0b7a834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:34Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:34 crc kubenswrapper[4921]: I1210 12:57:34.251286 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zmks6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f2626c5-78df-45d2-8970-c4f99790a0fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d43ebe41a779225842dfa1c4d3be01575113b67ada9be07f553df1514e9dcf85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ft9kj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zmks6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:34Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:34 crc kubenswrapper[4921]: I1210 12:57:34.266144 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pqlx4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://480da3b2621712c4562f9423dc98fdbf17a9dc45365f129777611bc7e934c709\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lhs2m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pqlx4\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:34Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:34 crc kubenswrapper[4921]: I1210 12:57:34.294733 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50684108-04fc-405c-82be-d21d16cd650b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8fd269a96475df9dccf2f7bd0ffae831f397f49232f5c22df67903b9b8b8161e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a88b1b9101bc4ab339d394df337e4e11ec8af98b44b621bcb84eed1a0fba3d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://933c0c81aa0aa2d676a6e404f883a7c81240ef7b07a2e794878c85994d0eb88f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f8f888f214898cb28563da7a77267781622df1f2231c27d1fbdee617ada1ec2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27956424405bdf6223a96b8fd91b5152276a1501c3de2e07dfafc8b3329a6063\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59a0f
3962237d723e5aa9044de1ddce3673ae1fb4c9e5e0478cd41daa661f6d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7cae279c3cb664f0de50f9e6f8e88378d8878555863051da2626693c4337cff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7cae279c3cb664f0de50f9e6f8e88378d8878555863051da2626693c4337cff\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-10T12:57:23Z\\\",\\\"message\\\":\\\"be-controller-manager/kube-controller-manager-crc openshift-ovn-kubernetes/ovnkube-node-m7n89 openshift-dns/node-resolver-zmks6 openshift-etcd/etcd-crc openshift-multus/multus-pqlx4 openshift-network-node-identity/network-node-identity-vrzqb]\\\\nI1210 12:57:21.359087 6291 obj_retry.go:418] Waiting for all the *v1.Pod retry setup to complete in iterateRetryResources\\\\nI1210 12:57:21.359102 6291 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nI1210 12:57:21.359112 6291 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nI1210 12:57:21.359124 6291 ovn.go:134] Ensuring zone local for Pod openshift-network-node-identity/network-node-identity-vrzqb in node crc\\\\nI1210 12:57:21.359130 6291 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb after 0 failed attempt(s)\\\\nI1210 12:57:21.359134 6291 default_network_controller.go:776] Recording success event on pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nI1210 12:57:21.359147 6291 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1210 12:57:21.359215 6291 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to 
create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:20Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-m7n89_openshift-ovn-kubernetes(50684108-04fc-405c-82be-d21d16cd650b)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd0025f5be6e68aba73c349dd732281dead920b7d8c2d307b4a67cfdafb99119\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recurs
iveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34932b230bb26e6c4b1bdf433827ce608df8658f6fb76140a4f0ac680dc1d43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://34932b230bb26e6c4b1bdf433827ce608df8658f6fb76140a4f0ac680dc1d43f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-m7n89\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:34Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:34 crc kubenswrapper[4921]: I1210 12:57:34.304890 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:34 crc kubenswrapper[4921]: I1210 12:57:34.304944 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:34 crc kubenswrapper[4921]: I1210 12:57:34.304957 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:34 crc kubenswrapper[4921]: I1210 12:57:34.304978 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:34 crc kubenswrapper[4921]: I1210 12:57:34.304992 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:34Z","lastTransitionTime":"2025-12-10T12:57:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:34 crc kubenswrapper[4921]: I1210 12:57:34.314177 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wwrv2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6b3380f-1dd4-45de-9c44-eaa37f965801\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://18de9b421542cbc73b0797f1d6e6e6752b88c3f802e5f2fd16d303de041ac72a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqtvp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49ddf0d56e11ffafc30ec8b0065dd6ef3c3decdbf696e169013572c830f6557c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqtvp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:21Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wwrv2\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:34Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:34 crc kubenswrapper[4921]: I1210 12:57:34.328254 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-j2nnf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9cc656f0-ce36-474b-9fa3-1ce9f43675a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9vnm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9vnm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:22Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-j2nnf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:34Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:34 crc 
kubenswrapper[4921]: I1210 12:57:34.350139 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"371fafdc-aa16-4608-aaa2-e419c4ddbc18\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b9a190a657ca03f3fb08626b7af512164ff131b1783b903a02005a111a7036c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57991b0cb6fd4b37082ff5d4eecc6227d77f241e9a983cd3e0eb9db5b485865f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c24d974446ee70bf587bf3969542cda98f062a9cc78b6af73005d9b8d0a6ee02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\
\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5a3f231014293fc0412e577cf9840f62f8db869ea4f0f8bef1bfc5112b38cf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://17a6158acd097054719316d2ad29dc036546d3951bb1e8dd010618f9155270a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://534968b5f5d9e7b3063c91a3e0b68ba04d83e2cb65ab688b23d284adc6852155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://534968b5f5d9e7b3063c91a3e0b68ba04d83e2cb65ab688b23d284adc6852155\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0acb3ca5fa3945c89412f466b00193354c94ce56dbba608c104d3baf555a2c3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0acb3ca5fa3945c89412f466b00193354c94ce56dbba608c104d3baf555a2c3a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Co
mpleted\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b7d1b714acf0f278cc0310204225d417266a241f1ea827dc625f7b89a7d0ebac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b7d1b714acf0f278cc0310204225d417266a241f1ea827dc625f7b89a7d0ebac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:34Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:34 crc kubenswrapper[4921]: I1210 12:57:34.369916 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"02726135-3050-46a1-a3ab-b2ce46cdb75d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12292f0529bcf32fb33e5accfbd0dfd7d53e377a9ee2046d4ca6efc78fe1c31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a4716beddbcd24e8418830aa5494cffffc21272e45e30bd15cfe58bfc07c543\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f66fe2144cde40619405c04d7d83cbcc2e78503401df428502abad1682d4cb7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4244835c7f038a7c1bf4820de49854350a23fac13c5a252a1553f6508594f10e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:34Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:34 crc kubenswrapper[4921]: I1210 12:57:34.386769 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:34Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:34 crc kubenswrapper[4921]: I1210 12:57:34.405862 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://307b845aae3352df08e2f9fd394f4110a37b2a21650593ebb584c5bf37d01397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3be8a498516e12174c8b5612669fd69deef610c01ed9884a5228cd436bbae3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:34Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:34 crc kubenswrapper[4921]: I1210 12:57:34.407432 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:34 crc kubenswrapper[4921]: I1210 12:57:34.407480 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:34 crc kubenswrapper[4921]: I1210 12:57:34.407495 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:34 crc kubenswrapper[4921]: I1210 12:57:34.407519 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:34 crc kubenswrapper[4921]: I1210 12:57:34.407537 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:34Z","lastTransitionTime":"2025-12-10T12:57:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:34 crc kubenswrapper[4921]: I1210 12:57:34.421739 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-86bpd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"709b4982-f2e6-4692-ab1a-c1d5b7d507ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c842f48ca574d23a086e1b248c17102895f4f45897ac87ddcc1f98f170a22bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34d9e720fab0818e4cdf1e2a4da042a5648c7c396fedf17b395ad07ececd5c9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://34d9e720fab0818e4cdf1e2a4da042a5648c7c396fedf17b395ad07ececd5c9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04723fc3840c9d632dae527a5afa04fc7eea858426056da3dfe8e72186198ab1\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://04723fc3840c9d632dae527a5afa04fc7eea858426056da3dfe8e72186198ab1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://adf25ba213f519cad3c21233c0f3d2a383d978543da8ea1db41bb60dd29f9f3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://adf25ba213f519cad3c21233c0f3d2a383d978543da8ea1db41bb60dd29f9f3e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://baa63608618bc4f059414317df70f14a33321d5aed291adc02a9daac92cf5428\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baa63608618bc4f059414317df70f14a33321d5aed291adc02a9daac92cf5428\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e54a218f367591b87841a39399d5889344b8b92fcc70d77105a0191d3dba37c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e54a218f367591b87841a39399d5889344b8b92fcc70d77105a0191d3dba37c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d90adbff8edcd85eebe4858e412769dff7a05b05bbe7fc533906b55e6ee415e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d90adbff8edcd85eebe4858e412769dff7a05b05bbe7fc533906b55e6ee415e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-86bpd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:34Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:34 crc kubenswrapper[4921]: I1210 12:57:34.436629 4921 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:34Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:34 crc kubenswrapper[4921]: I1210 12:57:34.458348 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:34Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:34 crc kubenswrapper[4921]: I1210 12:57:34.482744 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"354355f7-6630-49a8-bdc5-5e875feecb7f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22c45fd7d4d0bb91e995e76a0d813660f9b488a4765e3a21eab2485e1ff03ff3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dbm9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27975eaa70887a1e6ec3bc21ce170bbe5dfe5a05172264be8c8bd343aea02998\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dbm9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-vn2n6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:34Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:34 crc kubenswrapper[4921]: I1210 12:57:34.500885 4921 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f57208b0-80bc-4c1b-bbab-9d2f858972f6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0534394a39803e8a7555e29d0770b5ac7f9197a5f0e03bec4c5460d77fffdd14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6eaca0cb438e61f0856ed7dc64256ccd02aee8dac014d1f5e9cd8aa180c736fb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://692a4c4828dc74b1bfb948f58fab96ee6674030cb9009c72f30f9eae482eb682\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f534d6390920d177e185001b28f7ece42d82a0da922
b4aaf174c271dbe975c50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b39874b20cdccc7903753342421a1f7e13b7e99a2cb699a7c0e44226aebd4f4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"message\\\":\\\"et denominator for mutating requests\\\\\\\" limit=200\\\\nI1210 12:57:01.294872 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1210 12:57:01.294893 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1210 12:57:01.294918 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 12:57:01.294926 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 12:57:01.294932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1210 12:57:01.294934 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1210 12:57:01.294938 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1210 12:57:01.294941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1210 12:57:01.301734 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2446357718/tls.crt::/tmp/serving-cert-2446357718/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1765371405\\\\\\\\\\\\\\\" (2025-12-10 12:56:44 +0000 UTC to 2026-01-09 12:56:45 +0000 UTC (now=2025-12-10 12:57:01.30169166 +0000 UTC))\\\\\\\"\\\\nI1210 12:57:01.301889 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1765371416\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1765371416\\\\\\\\\\\\\\\" (2025-12-10 11:56:55 +0000 UTC to 2026-12-10 11:56:55 +0000 UTC (now=2025-12-10 12:57:01.301865574 +0000 UTC))\\\\\\\"\\\\nI1210 12:57:01.301907 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1210 12:57:01.301934 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nF1210 12:57:01.302850 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e5afbcb1ea81c3f9ec4152ef614a3f07ba1ded75c774c467e968f9c3ee72e33\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bee74fc4c681cc10c5a460c807659272e393e19173109e82ef65371c5b363ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bee74fc4c681cc10c5a460c807659272e393e19173109e82ef65371c5b363ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:34Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:34 crc kubenswrapper[4921]: I1210 12:57:34.510538 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:34 crc kubenswrapper[4921]: I1210 12:57:34.510600 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:34 crc kubenswrapper[4921]: I1210 12:57:34.510615 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:34 crc kubenswrapper[4921]: I1210 12:57:34.510635 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:34 crc kubenswrapper[4921]: I1210 12:57:34.510648 4921 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:34Z","lastTransitionTime":"2025-12-10T12:57:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:34 crc kubenswrapper[4921]: I1210 12:57:34.613580 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:34 crc kubenswrapper[4921]: I1210 12:57:34.614001 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:34 crc kubenswrapper[4921]: I1210 12:57:34.614160 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:34 crc kubenswrapper[4921]: I1210 12:57:34.614315 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:34 crc kubenswrapper[4921]: I1210 12:57:34.614488 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:34Z","lastTransitionTime":"2025-12-10T12:57:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:34 crc kubenswrapper[4921]: I1210 12:57:34.717378 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:34 crc kubenswrapper[4921]: I1210 12:57:34.717431 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:34 crc kubenswrapper[4921]: I1210 12:57:34.717445 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:34 crc kubenswrapper[4921]: I1210 12:57:34.717461 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:34 crc kubenswrapper[4921]: I1210 12:57:34.717474 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:34Z","lastTransitionTime":"2025-12-10T12:57:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:34 crc kubenswrapper[4921]: I1210 12:57:34.821239 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:34 crc kubenswrapper[4921]: I1210 12:57:34.821294 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:34 crc kubenswrapper[4921]: I1210 12:57:34.821305 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:34 crc kubenswrapper[4921]: I1210 12:57:34.821323 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:34 crc kubenswrapper[4921]: I1210 12:57:34.821335 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:34Z","lastTransitionTime":"2025-12-10T12:57:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:34 crc kubenswrapper[4921]: I1210 12:57:34.924625 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:34 crc kubenswrapper[4921]: I1210 12:57:34.924675 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:34 crc kubenswrapper[4921]: I1210 12:57:34.924686 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:34 crc kubenswrapper[4921]: I1210 12:57:34.924709 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:34 crc kubenswrapper[4921]: I1210 12:57:34.924724 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:34Z","lastTransitionTime":"2025-12-10T12:57:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.027214 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.027274 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.027285 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.027302 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.027312 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:35Z","lastTransitionTime":"2025-12-10T12:57:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.131027 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.131075 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.131088 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.131110 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.131125 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:35Z","lastTransitionTime":"2025-12-10T12:57:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.234212 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.234246 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.234255 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.234272 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.234284 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:35Z","lastTransitionTime":"2025-12-10T12:57:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.241299 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.241367 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.241404 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.241429 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.241446 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:35Z","lastTransitionTime":"2025-12-10T12:57:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:35 crc kubenswrapper[4921]: E1210 12:57:35.261840 4921 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:35Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:35Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"aa6d129a-c0be-471d-913f-2184d68fb040\\\",\\\"systemUUID\\\":\\\"539c9d38-f260-4af7-b6c3-f4170bf93c3e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:35Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.266944 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.267061 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.267302 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.267328 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.267341 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:35Z","lastTransitionTime":"2025-12-10T12:57:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.278032 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 10 12:57:35 crc kubenswrapper[4921]: E1210 12:57:35.281817 4921 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:35Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:35Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"aa6d129a-c0be-471d-913f-2184d68fb040\\\",\\\"systemUUID\\\":\\\"539c9d38-f260-4af7-b6c3-f4170bf93c3e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:35Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.285310 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.285359 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.285368 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.285383 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.285406 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:35Z","lastTransitionTime":"2025-12-10T12:57:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.288118 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.294130 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bd245e67c99943297f64701eba8772143dc206caf67849eaf2f9a8e82dab0d26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:35Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:35 crc kubenswrapper[4921]: E1210 12:57:35.301953 4921 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status 
\"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:35Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:35Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae66
9\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-rel
ease-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"aa6d129a-c0be-471d-913f-2184d68fb040\\\",\\\"systemUUID\\\":\\\"539c9d38-f260-4af7-b6c3-f4170bf93c3e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:35Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.305062 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jskgz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ad003cc-9fcc-4fc6-86b9-247b30013c0a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8c0dc3ea5672198c430f12ce59b7f2a66100fe52e0f7b4552deba97144250d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m875h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs
\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:11Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jskgz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:35Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.305872 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.305899 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.305908 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.305924 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.305934 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:35Z","lastTransitionTime":"2025-12-10T12:57:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.316121 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-j2nnf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9cc656f0-ce36-474b-9fa3-1ce9f43675a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9vnm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9vnm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:22Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-j2nnf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:35Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:35 crc kubenswrapper[4921]: E1210 12:57:35.318318 4921 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:35Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID 
available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:35Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056
b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951
},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"aa6d129a-c0be-471d-913f-2184d68fb040\\\",\\\"systemUUID\\\":\\\"539c9d38-f260-4af7-b6c3-f4170bf93c3e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2025-12-10T12:57:35Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.321355 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.321387 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.321412 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.321430 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.321442 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:35Z","lastTransitionTime":"2025-12-10T12:57:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.330373 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2789e9cd1bca4abecf0939aad4a5f63bdc250a525ad3664bc2440e8b0b7a834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: 
current time 2025-12-10T12:57:35Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:35 crc kubenswrapper[4921]: E1210 12:57:35.333247 4921 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:35Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:35Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"aa6d129a-c0be-471d-913f-2184d68fb040\\\",\\\"systemUUID\\\":\\\"539c9d38-f260-4af7-b6c3-f4170bf93c3e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:35Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:35 crc kubenswrapper[4921]: E1210 12:57:35.333358 4921 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.337085 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.337137 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.337150 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.337169 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.337182 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:35Z","lastTransitionTime":"2025-12-10T12:57:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.341758 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zmks6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f2626c5-78df-45d2-8970-c4f99790a0fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d43ebe41a779225842dfa1c4d3be01575113b67ada9be07f553df1514e9dcf85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ft9kj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zmks6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: 
failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:35Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.354892 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pqlx4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://480da3b2621712c4562f9423dc98fdbf17a9dc45365f129777611bc7e934c709\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lhs2m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"
hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pqlx4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:35Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.375041 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50684108-04fc-405c-82be-d21d16cd650b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8fd269a96475df9dccf2f7bd0ffae831f397f49232f5c22df67903b9b8b8161e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a88b1b9101bc4ab339d394df337e4e11ec8af98b44b621bcb84eed1a0fba3d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://933c0c81aa0aa2d676a6e404f883a7c81240ef7b07a2e794878c85994d0eb88f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f8f888f214898cb28563da7a77267781622df1f2231c27d1fbdee617ada1ec2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27956424405bdf6223a96b8fd91b5152276a1501c3de2e07dfafc8b3329a6063\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59a0f3962237d723e5aa9044de1ddce3673ae1fb4c9e5e0478cd41daa661f6d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7cae279c3cb664f0de50f9e6f8e88378d887855
5863051da2626693c4337cff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7cae279c3cb664f0de50f9e6f8e88378d8878555863051da2626693c4337cff\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-10T12:57:23Z\\\",\\\"message\\\":\\\"be-controller-manager/kube-controller-manager-crc openshift-ovn-kubernetes/ovnkube-node-m7n89 openshift-dns/node-resolver-zmks6 openshift-etcd/etcd-crc openshift-multus/multus-pqlx4 openshift-network-node-identity/network-node-identity-vrzqb]\\\\nI1210 12:57:21.359087 6291 obj_retry.go:418] Waiting for all the *v1.Pod retry setup to complete in iterateRetryResources\\\\nI1210 12:57:21.359102 6291 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nI1210 12:57:21.359112 6291 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nI1210 12:57:21.359124 6291 ovn.go:134] Ensuring zone local for Pod openshift-network-node-identity/network-node-identity-vrzqb in node crc\\\\nI1210 12:57:21.359130 6291 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb after 0 failed attempt(s)\\\\nI1210 12:57:21.359134 6291 default_network_controller.go:776] Recording success event on pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nI1210 12:57:21.359147 6291 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1210 12:57:21.359215 6291 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:20Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-m7n89_openshift-ovn-kubernetes(50684108-04fc-405c-82be-d21d16cd650b)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd0025f5be6e68aba73c349dd732281dead920b7d8c2d307b4a67cfdafb99119\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34932b230bb26e6c4b1bdf433827ce608df8658f6fb76140a4f0ac680dc1d43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://34932b230bb26e6c4b1bdf433827ce608df8658f6fb76140a4f0ac680dc1d43f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-m7n89\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:35Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.387891 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wwrv2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6b3380f-1dd4-45de-9c44-eaa37f965801\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://18de9b421542cbc73b0797f1d6e6e6752b88c3f802e5f2fd16d303de041ac72a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqtvp
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49ddf0d56e11ffafc30ec8b0065dd6ef3c3decdbf696e169013572c830f6557c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqtvp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:21Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wwrv2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:35Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.409768 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"371fafdc-aa16-4608-aaa2-e419c4ddbc18\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b9a190a657ca03f3fb08626b7af512164ff131b1783b903a02005a111a7036c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57991b0cb6fd4b37082ff5d4eecc6227d77f241e9a983cd3e0eb9db5b485865f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c24d974446ee70bf587bf3969542cda98f062a9cc78b6af73005d9b8d0a6ee02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5a3f231014293fc0412e577cf9840f62f8db86
9ea4f0f8bef1bfc5112b38cf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://17a6158acd097054719316d2ad29dc036546d3951bb1e8dd010618f9155270a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://534968b5f5d9e7b3063c91a3e0b68ba04d83e2cb65ab688b23d284adc6852155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://534968b5f5d9e7b3063c91a3e0b68ba04d83e2cb65ab688b23d284adc6852155\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0acb3ca5fa3945c89412f466b00193354c94ce56dbba608c104d3baf555a2c3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0acb3ca5fa3945c89412f466b00193354c94ce56dbba608c104d3baf555a2c3a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b7d1b714acf0f278cc0310204225d417266a241f1ea827dc625f7b89a7d0ebac\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b7d1b714acf0f278cc0310204225d417266a241f1ea827dc625f7b89a7d0ebac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:35Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.429330 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02726135-3050-46a1-a3ab-b2ce46cdb75d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12292f0529bcf32fb33e5accfbd0dfd7d53e377a9ee2046d4ca6efc78fe1c31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a4716beddbcd24e8418830aa5494cffffc21272e45e30bd15cfe58bfc07c543\\\",\\\"image\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f66fe2144cde40619405c04d7d83cbcc2e78503401df428502abad1682d4cb7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4244835c7f038a7c1bf4820de49854350a23fac13c5a252a1553f6508594f10e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:35Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.439417 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.439543 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.439557 4921 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.439581 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.439617 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:35Z","lastTransitionTime":"2025-12-10T12:57:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.458493 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:35Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.474302 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://307b845aae3352df08e2f9fd394f4110a37b2a21650593ebb584c5bf37d01397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3be8a498516e12174c8b5612669fd69deef610c01ed9884a5228cd436bbae3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:35Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.489776 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-86bpd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"709b4982-f2e6-4692-ab1a-c1d5b7d507ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c842f48ca574d23a086e1b248c17102895f4f45897ac87ddcc1f98f170a22bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34d9e720fab0818e4cdf1e2a4da042a5648c7c396fedf17b395ad07ececd5c9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://34d9e720fab0818e4cdf1e2a4da042a5648c7c396fedf17b395ad07ececd5c9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"start
edAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04723fc3840c9d632dae527a5afa04fc7eea858426056da3dfe8e72186198ab1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://04723fc3840c9d632dae527a5afa04fc7eea858426056da3dfe8e72186198ab1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://adf25ba213f519cad3c21233c0f3d2a383d978543da8ea1db41bb60dd29f9f3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://adf25ba213f519cad3c21233c0f3d2a383d978543da8ea1db41bb60dd29f9f3e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://baa63608618bc4f059414317df70f14a33321d5aed291adc02a9daac92cf5428\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\"
:\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baa63608618bc4f059414317df70f14a33321d5aed291adc02a9daac92cf5428\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e54a218f367591b87841a39399d5889344b8b92fcc70d77105a0191d3dba37c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e54a218f367591b87841a39399d5889344b8b92fcc70d77105a0191d3dba37c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d90adbff8edcd85eebe4858e412769dff7a05b05bbe7fc533906b55e6ee415e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d90adbff8edcd85eebe4858e412769dff7a05b05bbe7fc533906b55e6ee415e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.1
68.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-86bpd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:35Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.503059 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f57208b0-80bc-4c1b-bbab-9d2f858972f6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0534394a39803e8a7555e29d0770b5ac7f9197a5f0e03bec4c5460d77fffdd14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6eaca0cb438e61f0856ed7dc64256ccd02aee8dac014d1f5e9cd8aa180c736fb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://692a4c4828dc74b1bfb948f58fab96ee6674030cb9009c72f30f9eae482eb682\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-clust
er-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f534d6390920d177e185001b28f7ece42d82a0da922b4aaf174c271dbe975c50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b39874b20cdccc7903753342421a1f7e13b7e99a2cb699a7c0e44226aebd4f4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"message\\\":\\\"et denominator for mutating requests\\\\\\\" limit=200\\\\nI1210 12:57:01.294872 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1210 12:57:01.294893 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1210 12:57:01.294918 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 12:57:01.294926 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 12:57:01.294932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1210 12:57:01.294934 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1210 12:57:01.294938 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1210 12:57:01.294941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1210 12:57:01.301734 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2446357718/tls.crt::/tmp/serving-cert-2446357718/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1765371405\\\\\\\\\\\\\\\" (2025-12-10 12:56:44 +0000 UTC to 2026-01-09 12:56:45 +0000 UTC (now=2025-12-10 12:57:01.30169166 +0000 UTC))\\\\\\\"\\\\nI1210 12:57:01.301889 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1765371416\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1765371416\\\\\\\\\\\\\\\" (2025-12-10 11:56:55 +0000 UTC to 2026-12-10 11:56:55 +0000 UTC (now=2025-12-10 12:57:01.301865574 +0000 UTC))\\\\\\\"\\\\nI1210 12:57:01.301907 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1210 12:57:01.301934 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nF1210 12:57:01.302850 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e5afbcb1ea81c3f9ec4152ef614a3f07ba1ded75c774c467e968f9c3ee72e33\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bee74fc4c681cc10c5a460c807659272e393e19173109e82ef65371c5b363ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bee74fc4c681cc10c5a460c807659272e393e19173109e82ef65371c5b363ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:35Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.516833 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:35Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.532462 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:35Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.542271 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.542320 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.542329 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.542347 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.542358 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:35Z","lastTransitionTime":"2025-12-10T12:57:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.544537 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"354355f7-6630-49a8-bdc5-5e875feecb7f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22c45fd7d4d0bb91e995e76a0d813660f9b488a4765e3a21eab2485e1ff03ff3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dbm9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27975eaa70887a1e6ec3bc21ce170bbe5dfe5a05172264be8c8bd343aea02998\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dbm9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-vn2n6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:35Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.605160 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-m7n89_50684108-04fc-405c-82be-d21d16cd650b/ovnkube-controller/1.log" Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.608152 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" event={"ID":"50684108-04fc-405c-82be-d21d16cd650b","Type":"ContainerStarted","Data":"551dc5f4c39c06d2143805320061efc95d84e870eeecf23b3a64d829653810ed"} Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.623454 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f57208b0-80bc-4c1b-bbab-9d2f858972f6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0534394a39803e8a7555e29d0770b5ac7f9197a5f0e03bec4c5460d77fffdd14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6eaca0cb438e61f0856ed7dc64256ccd02aee8dac014d1f5e9cd8aa180c736fb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://692a4c4828dc74b1bfb948f58fab96ee6
674030cb9009c72f30f9eae482eb682\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f534d6390920d177e185001b28f7ece42d82a0da922b4aaf174c271dbe975c50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b39874b20cdccc7903753342421a1f7e13b7e99a2cb699a7c0e44226aebd4f4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"message\\\":\\\"et denominator for mutating requests\\\\\\\" limit=200\\\\nI1210 12:57:01.294872 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1210 12:57:01.294893 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1210 12:57:01.294918 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 12:57:01.294926 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 12:57:01.294932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1210 12:57:01.294934 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1210 12:57:01.294938 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1210 12:57:01.294941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1210 12:57:01.301734 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2446357718/tls.crt::/tmp/serving-cert-2446357718/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1765371405\\\\\\\\\\\\\\\" (2025-12-10 12:56:44 +0000 UTC to 2026-01-09 12:56:45 +0000 UTC (now=2025-12-10 12:57:01.30169166 +0000 UTC))\\\\\\\"\\\\nI1210 12:57:01.301889 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1765371416\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1765371416\\\\\\\\\\\\\\\" (2025-12-10 11:56:55 +0000 UTC to 2026-12-10 11:56:55 +0000 UTC (now=2025-12-10 12:57:01.301865574 +0000 UTC))\\\\\\\"\\\\nI1210 12:57:01.301907 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1210 12:57:01.301934 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to 
be initiated\\\\nF1210 12:57:01.302850 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e5afbcb1ea81c3f9ec4152ef614a3f07ba1ded75c774c467e968f9c3ee72e33\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bee74fc4c681cc10c5a460c807659272e393e19173109e82ef65371c5b363ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bee74fc4c681cc10c5a460c807659272e393e19173109e82ef65371c5b363ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:35Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.637976 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:35Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.644896 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.644949 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.644963 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.644985 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.644996 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:35Z","lastTransitionTime":"2025-12-10T12:57:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.652168 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:35Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.666211 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"354355f7-6630-49a8-bdc5-5e875feecb7f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22c45fd7d4d0bb91e995e76a0d813660f9b488a4765e3a21eab2485e1ff03ff3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dbm9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27975eaa70887a1e6ec3bc21ce170bbe5dfe5a05172264be8c8bd343aea02998\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dbm9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-vn2n6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:35Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.679711 4921 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bd245e67c99943297f64701eba8772143dc206caf67849eaf2f9a8e82dab0d26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:35Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.694713 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jskgz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ad003cc-9fcc-4fc6-86b9-247b30013c0a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8c0dc3ea5672198c430f12ce59b7f2a66100fe52e0f7b4552deba97144250d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m875h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:11Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jskgz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:35Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.722046 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50684108-04fc-405c-82be-d21d16cd650b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8fd269a96475df9dccf2f7bd0ffae831f397f49232f5c22df67903b9b8b8161e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a88b1b9101bc4ab339d394df337e4e11ec8af98b44b621bcb84eed1a0fba3d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://933c0c81aa0aa2d676a6e404f883a7c81240ef7b07a2e794878c85994d0eb88f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f8f888f214898cb28563da7a77267781622df1f2231c27d1fbdee617ada1ec2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27956424405bdf6223a96b8fd91b5152276a1501c3de2e07dfafc8b3329a6063\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59a0f3962237d723e5aa9044de1ddce3673ae1fb4c9e5e0478cd41daa661f6d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://551dc5f4c39c06d2143805320061efc95d84e870eeecf23b3a64d829653810ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7cae279c3cb664f0de50f9e6f8e88378d8878555863051da2626693c4337cff\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-10T12:57:23Z\\\",\\\"message\\\":\\\"be-controller-manager/kube-controller-manager-crc openshift-ovn-kubernetes/ovnkube-node-m7n89 openshift-dns/node-resolver-zmks6 openshift-etcd/etcd-crc openshift-multus/multus-pqlx4 openshift-network-node-identity/network-node-identity-vrzqb]\\\\nI1210 12:57:21.359087 6291 obj_retry.go:418] Waiting for all the *v1.Pod retry setup to complete in iterateRetryResources\\\\nI1210 12:57:21.359102 6291 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nI1210 12:57:21.359112 6291 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nI1210 12:57:21.359124 6291 ovn.go:134] Ensuring zone local for Pod openshift-network-node-identity/network-node-identity-vrzqb in node crc\\\\nI1210 12:57:21.359130 6291 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb after 0 failed attempt(s)\\\\nI1210 12:57:21.359134 6291 default_network_controller.go:776] Recording success event on pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nI1210 12:57:21.359147 6291 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1210 12:57:21.359215 6291 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to 
create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:20Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd0025f5be6e68aba73c349dd732281dead920b7d8c2d307b4a67cfdafb99119\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\
"containerID\\\":\\\"cri-o://34932b230bb26e6c4b1bdf433827ce608df8658f6fb76140a4f0ac680dc1d43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://34932b230bb26e6c4b1bdf433827ce608df8658f6fb76140a4f0ac680dc1d43f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-m7n89\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:35Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.738864 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wwrv2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6b3380f-1dd4-45de-9c44-eaa37f965801\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://18de9b421542cbc73b0797f1d6e6e6752b88c3f802e5f2fd16d303de041ac72a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqtvp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49ddf0d56e11ffafc30ec8b0065dd6ef3c3decdbf696e169013572c830f6557c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqtvp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:21Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wwrv2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:35Z is after 2025-08-24T17:21:41Z" Dec 10 
12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.748340 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.748424 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.748438 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.748458 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.748474 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:35Z","lastTransitionTime":"2025-12-10T12:57:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.756264 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-j2nnf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9cc656f0-ce36-474b-9fa3-1ce9f43675a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9vnm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9vnm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:22Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-j2nnf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:35Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.770690 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ec9bd81-b3fb-41db-acd3-2aff9c4f1c91\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8db8d5b587ec546f8a084fec36cbe7f89aa6998f4bc6dbd1bb9fd22a35f1384e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://efb9127602fc13526bf57fef51bb7814039a1507cee77693ee2723ffc18620bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d0195b43f37c1b874a0daf78d2a91ec39fd64c275503d9f4ec64b74a0d8e423\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://14f71d57ff5277b39ee8d2960b247b98bd9d7ee9993d0fdcb6338c2386b1bc6f\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://14f71d57ff5277b39ee8d2960b247b98bd9d7ee9993d0fdcb6338c2386b1bc6f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:35Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.795440 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2789e9cd1bca4abecf0939aad4a5f63bdc250a525ad3664bc2440e8b0b7a834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:35Z is after 
2025-08-24T17:21:41Z" Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.814336 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zmks6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f2626c5-78df-45d2-8970-c4f99790a0fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d43ebe41a779225842dfa1c4d3be01575113b67ada9be07f553df1514e9dcf85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ft9kj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zmks6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:35Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.830376 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pqlx4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://480da3b2621712c4562f9423dc98fdbf17a9dc45365f129777611bc7e934c709\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lhs2m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pqlx4\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:35Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.843422 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://307b845aae3352df08e2f9fd394f4110a37b2a21650593ebb584c5bf37d01397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3be8a498516e12174c8b5612669fd69deef610c01ed9884a5228cd436bbae3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:35Z is after 
2025-08-24T17:21:41Z" Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.851723 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.851811 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.851828 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.851874 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.851891 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:35Z","lastTransitionTime":"2025-12-10T12:57:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.858703 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-86bpd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"709b4982-f2e6-4692-ab1a-c1d5b7d507ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c842f48ca574d23a086e1b248c17102895f4f45897ac87ddcc1f98f170a22bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34d9e720fab0818e4cdf1e2a4da042a5648c7c396fedf17b395ad07ececd5c9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db77
08c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://34d9e720fab0818e4cdf1e2a4da042a5648c7c396fedf17b395ad07ececd5c9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04723fc3840c9d632dae527a5afa04fc7eea858426056da3dfe8e72186198ab1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://04723fc3840c9d632dae527a5afa04fc7eea858426056da3dfe8e72186198ab1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://adf25ba213f519cad3c21233c0f3d2a383d978543da8ea1db41bb60dd29f9f3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://adf25ba213f519cad3c21233c0f3d2a383d978543da8ea1db41bb60dd29f9f3e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\"
:\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://baa63608618bc4f059414317df70f14a33321d5aed291adc02a9daac92cf5428\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baa63608618bc4f059414317df70f14a33321d5aed291adc02a9daac92cf5428\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e54a218f367591b87841a39399d5889344b8b92fcc70d77105a0191d3dba37c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e54a218f367591b87841a39399d5889344b8b92fcc70d77105a0191d3dba37c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d90adbff8edcd85eebe4858e412769dff7a05b05bbe7fc533906b55e6ee415e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d90adbff8edcd85eebe4858e412769dff7a05b05bbe7fc533906b55e6ee415e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\
\\"2025-12-10T12:57:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-86bpd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:35Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.878500 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"371fafdc-aa16-4608-aaa2-e419c4ddbc18\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b9a190a657ca03f3fb08626b7af512164ff131b1783b903a02005a111a7036c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57991b0cb6fd4b37082ff5d4eecc6227d77f241e9a983cd3e0eb9db5b485865f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":t
rue,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c24d974446ee70bf587bf3969542cda98f062a9cc78b6af73005d9b8d0a6ee02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5a3f231014293fc0412e577cf9840f62f8db869ea4f0f8bef1bfc5112b38cf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://17a6158acd097054719316d2ad29dc036546d3951bb1e8dd010618f9155270a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://534968b5f5d9e7b3063c91a3e0b68ba04d83e2cb65ab688b23d284adc6852155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o:
//534968b5f5d9e7b3063c91a3e0b68ba04d83e2cb65ab688b23d284adc6852155\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0acb3ca5fa3945c89412f466b00193354c94ce56dbba608c104d3baf555a2c3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0acb3ca5fa3945c89412f466b00193354c94ce56dbba608c104d3baf555a2c3a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b7d1b714acf0f278cc0310204225d417266a241f1ea827dc625f7b89a7d0ebac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b7d1b714acf0f278cc0310204225d417266a241f1ea827dc625f7b89a7d0ebac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:35Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.894436 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"02726135-3050-46a1-a3ab-b2ce46cdb75d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12292f0529bcf32fb33e5accfbd0dfd7d53e377a9ee2046d4ca6efc78fe1c31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a4716beddbcd24e8418830aa5494cffffc21272e45e30bd15cfe58bfc07c543\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f66fe2144cde40619405c04d7d83cbcc2e78503401df428502abad1682d4cb7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4244835c7f038a7c1bf4820de49854350a23fac13c5a252a1553f6508594f10e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:35Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.910432 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:35Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.954867 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.954934 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.954947 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.954971 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:35 crc kubenswrapper[4921]: I1210 12:57:35.954983 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:35Z","lastTransitionTime":"2025-12-10T12:57:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:36 crc kubenswrapper[4921]: I1210 12:57:36.058377 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:36 crc kubenswrapper[4921]: I1210 12:57:36.058475 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:36 crc kubenswrapper[4921]: I1210 12:57:36.058490 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:36 crc kubenswrapper[4921]: I1210 12:57:36.058523 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:36 crc kubenswrapper[4921]: I1210 12:57:36.058544 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:36Z","lastTransitionTime":"2025-12-10T12:57:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:36 crc kubenswrapper[4921]: I1210 12:57:36.162149 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:36 crc kubenswrapper[4921]: I1210 12:57:36.162210 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:36 crc kubenswrapper[4921]: I1210 12:57:36.162229 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:36 crc kubenswrapper[4921]: I1210 12:57:36.162252 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:36 crc kubenswrapper[4921]: I1210 12:57:36.162269 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:36Z","lastTransitionTime":"2025-12-10T12:57:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:36 crc kubenswrapper[4921]: I1210 12:57:36.192718 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j2nnf" Dec 10 12:57:36 crc kubenswrapper[4921]: I1210 12:57:36.192774 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 12:57:36 crc kubenswrapper[4921]: I1210 12:57:36.192811 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 12:57:36 crc kubenswrapper[4921]: I1210 12:57:36.192821 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 12:57:36 crc kubenswrapper[4921]: E1210 12:57:36.193544 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j2nnf" podUID="9cc656f0-ce36-474b-9fa3-1ce9f43675a4" Dec 10 12:57:36 crc kubenswrapper[4921]: E1210 12:57:36.193877 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 12:57:36 crc kubenswrapper[4921]: E1210 12:57:36.194195 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 12:57:36 crc kubenswrapper[4921]: E1210 12:57:36.194689 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 12:57:36 crc kubenswrapper[4921]: I1210 12:57:36.265641 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:36 crc kubenswrapper[4921]: I1210 12:57:36.265708 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:36 crc kubenswrapper[4921]: I1210 12:57:36.265733 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:36 crc kubenswrapper[4921]: I1210 12:57:36.265767 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:36 crc kubenswrapper[4921]: I1210 12:57:36.265790 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:36Z","lastTransitionTime":"2025-12-10T12:57:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:36 crc kubenswrapper[4921]: I1210 12:57:36.369228 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:36 crc kubenswrapper[4921]: I1210 12:57:36.369296 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:36 crc kubenswrapper[4921]: I1210 12:57:36.369315 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:36 crc kubenswrapper[4921]: I1210 12:57:36.369338 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:36 crc kubenswrapper[4921]: I1210 12:57:36.369356 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:36Z","lastTransitionTime":"2025-12-10T12:57:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:36 crc kubenswrapper[4921]: I1210 12:57:36.471619 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:36 crc kubenswrapper[4921]: I1210 12:57:36.471983 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:36 crc kubenswrapper[4921]: I1210 12:57:36.472146 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:36 crc kubenswrapper[4921]: I1210 12:57:36.472294 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:36 crc kubenswrapper[4921]: I1210 12:57:36.472457 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:36Z","lastTransitionTime":"2025-12-10T12:57:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:36 crc kubenswrapper[4921]: I1210 12:57:36.575887 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:36 crc kubenswrapper[4921]: I1210 12:57:36.575946 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:36 crc kubenswrapper[4921]: I1210 12:57:36.575955 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:36 crc kubenswrapper[4921]: I1210 12:57:36.575976 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:36 crc kubenswrapper[4921]: I1210 12:57:36.575988 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:36Z","lastTransitionTime":"2025-12-10T12:57:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:36 crc kubenswrapper[4921]: I1210 12:57:36.617958 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-m7n89_50684108-04fc-405c-82be-d21d16cd650b/ovnkube-controller/2.log" Dec 10 12:57:36 crc kubenswrapper[4921]: I1210 12:57:36.620375 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-m7n89_50684108-04fc-405c-82be-d21d16cd650b/ovnkube-controller/1.log" Dec 10 12:57:36 crc kubenswrapper[4921]: I1210 12:57:36.623434 4921 generic.go:334] "Generic (PLEG): container finished" podID="50684108-04fc-405c-82be-d21d16cd650b" containerID="551dc5f4c39c06d2143805320061efc95d84e870eeecf23b3a64d829653810ed" exitCode=1 Dec 10 12:57:36 crc kubenswrapper[4921]: I1210 12:57:36.623485 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" event={"ID":"50684108-04fc-405c-82be-d21d16cd650b","Type":"ContainerDied","Data":"551dc5f4c39c06d2143805320061efc95d84e870eeecf23b3a64d829653810ed"} Dec 10 12:57:36 crc kubenswrapper[4921]: I1210 12:57:36.623524 4921 scope.go:117] "RemoveContainer" containerID="c7cae279c3cb664f0de50f9e6f8e88378d8878555863051da2626693c4337cff" Dec 10 12:57:36 crc kubenswrapper[4921]: I1210 12:57:36.624325 4921 scope.go:117] "RemoveContainer" containerID="551dc5f4c39c06d2143805320061efc95d84e870eeecf23b3a64d829653810ed" Dec 10 12:57:36 crc kubenswrapper[4921]: E1210 12:57:36.624486 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-m7n89_openshift-ovn-kubernetes(50684108-04fc-405c-82be-d21d16cd650b)\"" pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" podUID="50684108-04fc-405c-82be-d21d16cd650b" Dec 10 12:57:36 crc kubenswrapper[4921]: I1210 12:57:36.649825 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://307b845aae3352df08e2f9fd394f4110a37b2a21650593ebb584c5bf37d01397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3be8a498516e12174c8b5612669fd69deef610c01ed9884a5228cd436bbae3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:36Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:36 crc kubenswrapper[4921]: I1210 12:57:36.671882 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-86bpd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"709b4982-f2e6-4692-ab1a-c1d5b7d507ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c842f48ca574d23a086e1b248c17102895f4f45897ac87ddcc1f98f170a22bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34d9e720fab0818e4cdf1e2a4da042a5648c7c396fedf17b395ad07ececd5c9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://34d9e720fab0818e4cdf1e2a4da042a5648c7c396fedf17b395ad07ececd5c9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04723fc3840c9d632dae527a5afa04fc7eea858426056da3dfe8e72186198ab1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://04723fc3840c9d632dae527a5afa04fc7eea858426056da3dfe8e72186198ab1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://adf25ba213f519cad3c21233c0f3d2a383d978543da8ea1db41bb60dd29f9f3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://adf25ba213f519cad3c21233c0f3d2a383d978543da8ea1db41bb60dd29f9f3e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://baa63608618bc4f059414317df70f14a33321d5aed291adc02a9daac92cf5428\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baa63608618bc4f059414317df70f14a33321d5aed291adc02a9daac92cf5428\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e54a218f367591b87841a39399d5889344b8b92fcc70d77105a0191d3dba37c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e54a218f367591b87841a39399d5889344b8b92fcc70d77105a0191d3dba37c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d90adbff8edcd85eebe4858e412769dff7a05b05bbe7fc533906b55e6ee415e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d90adbff8edcd85eebe4858e412769dff7a05b05bbe7fc533906b55e6ee415e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-86bpd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:36Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:36 crc kubenswrapper[4921]: I1210 12:57:36.678234 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:36 crc kubenswrapper[4921]: I1210 12:57:36.678269 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:36 crc 
kubenswrapper[4921]: I1210 12:57:36.678279 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:36 crc kubenswrapper[4921]: I1210 12:57:36.678320 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:36 crc kubenswrapper[4921]: I1210 12:57:36.678332 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:36Z","lastTransitionTime":"2025-12-10T12:57:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:36 crc kubenswrapper[4921]: I1210 12:57:36.695331 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"371fafdc-aa16-4608-aaa2-e419c4ddbc18\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b9a190a657ca03f3fb08626b7af512164ff131b1783b903a02005a111a7036c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57991b0cb6fd4b37082ff5d4eecc6227d77f241e9a983cd3e0eb9db5b485865f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mou
ntPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c24d974446ee70bf587bf3969542cda98f062a9cc78b6af73005d9b8d0a6ee02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5a3f231014293fc0412e577cf9840f62f8db869ea4f0f8bef1bfc5112b38cf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://17a6158acd097054719316d2ad29dc036546d3951bb1e8dd010618f9155270a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://534968b5f5d9e7b3063c91a3e0b68ba04d83e2cb65ab688b23d284adc6852155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://534968b5f5d9e7b3063c91a3e0b68ba04d83e2cb65ab688b23d284adc6852155\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\
\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0acb3ca5fa3945c89412f466b00193354c94ce56dbba608c104d3baf555a2c3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0acb3ca5fa3945c89412f466b00193354c94ce56dbba608c104d3baf555a2c3a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b7d1b714acf0f278cc0310204225d417266a241f1ea827dc625f7b89a7d0ebac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b7d1b714acf0f278cc0310204225d417266a241f1ea827dc625f7b89a7d0ebac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:36Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:36 crc kubenswrapper[4921]: I1210 12:57:36.709051 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"02726135-3050-46a1-a3ab-b2ce46cdb75d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12292f0529bcf32fb33e5accfbd0dfd7d53e377a9ee2046d4ca6efc78fe1c31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a4716beddbcd24e8418830aa5494cffffc21272e45e30bd15cfe58bfc07c543\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f66fe2144cde40619405c04d7d83cbcc2e78503401df428502abad1682d4cb7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4244835c7f038a7c1bf4820de49854350a23fac13c5a252a1553f6508594f10e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:36Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:36 crc kubenswrapper[4921]: I1210 12:57:36.720035 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:36Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:36 crc kubenswrapper[4921]: I1210 12:57:36.731748 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f57208b0-80bc-4c1b-bbab-9d2f858972f6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0534394a39803e8a7555e29d0770b5ac7f9197a5f0e03bec4c5460d77fffdd14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6eaca0cb438e61f0856ed7dc64256ccd02aee8dac014d1f5e9cd8aa180c736fb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}
},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://692a4c4828dc74b1bfb948f58fab96ee6674030cb9009c72f30f9eae482eb682\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f534d6390920d177e185001b28f7ece42d82a0da922b4aaf174c271dbe975c50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b39874b20cdccc7903753342421a1f7e13b7e99a2cb699a7c0e44226aebd4f4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"message\\\":\\\"et denominator for mutating requests\\\\\\\" limit=200\\\\nI1210 12:57:01.294872 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1210 12:57:01.294893 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1210 12:57:01.294918 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 12:57:01.294926 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 12:57:01.294932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1210 12:57:01.294934 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1210 12:57:01.294938 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1210 12:57:01.294941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1210 12:57:01.301734 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2446357718/tls.crt::/tmp/serving-cert-2446357718/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1765371405\\\\\\\\\\\\\\\" (2025-12-10 12:56:44 +0000 UTC to 2026-01-09 12:56:45 +0000 UTC (now=2025-12-10 12:57:01.30169166 +0000 UTC))\\\\\\\"\\\\nI1210 12:57:01.301889 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1765371416\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1765371416\\\\\\\\\\\\\\\" (2025-12-10 11:56:55 +0000 UTC to 2026-12-10 11:56:55 +0000 UTC (now=2025-12-10 12:57:01.301865574 +0000 
UTC))\\\\\\\"\\\\nI1210 12:57:01.301907 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1210 12:57:01.301934 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nF1210 12:57:01.302850 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e5afbcb1ea81c3f9ec4152ef614a3f07ba1ded75c774c467e968f9c3ee72e33\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bee74fc4c681cc10c5a460c807659272e393e19173109e82ef65371c5b363ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bee74fc4c681cc10c5a460c807659272e393e19173109e82ef65371c5b363ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:36Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:36 crc kubenswrapper[4921]: I1210 12:57:36.743807 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:36Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:36 crc kubenswrapper[4921]: I1210 12:57:36.755016 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:36Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:36 crc kubenswrapper[4921]: I1210 12:57:36.765813 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"354355f7-6630-49a8-bdc5-5e875feecb7f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22c45fd7d4d0bb91e995e76a0d813660f9b488a4765e3a21eab2485e1ff03ff3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dbm9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27975eaa70887a1e6ec3bc21ce170bbe5dfe5a05172264be8c8bd343aea02998\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dbm9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-vn2n6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:36Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:36 crc kubenswrapper[4921]: I1210 12:57:36.776452 4921 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bd245e67c99943297f64701eba8772143dc206caf67849eaf2f9a8e82dab0d26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:36Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:36 crc kubenswrapper[4921]: I1210 12:57:36.780534 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:36 crc kubenswrapper[4921]: I1210 12:57:36.780558 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:36 crc kubenswrapper[4921]: I1210 12:57:36.780567 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:36 crc kubenswrapper[4921]: I1210 12:57:36.780624 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:36 crc kubenswrapper[4921]: I1210 12:57:36.780636 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:36Z","lastTransitionTime":"2025-12-10T12:57:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:36 crc kubenswrapper[4921]: I1210 12:57:36.789376 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jskgz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ad003cc-9fcc-4fc6-86b9-247b30013c0a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8c0dc3ea5672198c430f12ce59b7f2a66100fe52e0f7b4552deba97144250d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m875h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:11Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jskgz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:36Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:36 crc kubenswrapper[4921]: I1210 12:57:36.808533 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"50684108-04fc-405c-82be-d21d16cd650b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8fd269a96475df9dccf2f7bd0ffae831f397f49232f5c22df67903b9b8b8161e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a88b1b9101bc4ab339d394df337e4e11ec8af98b44b621bcb84eed1a0fba3d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://933c0c81aa0aa2d676a6e404f883a7c81240ef7b07a2e794878c85994d0eb88f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f8f888f214898cb28563da7a77267781622df1f2231c27d1fbdee617ada1ec2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27956424405bdf6223a96b8fd91b5152276a1501c3de2e07dfafc8b3329a6063\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59a0f3962237d723e5aa9044de1ddce3673ae1fb4c9e5e0478cd41daa661f6d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://551dc5f4c39c06d2143805320061efc95d84e870eeecf23b3a64d829653810ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7cae279c3cb664f0de50f9e6f8e88378d8878555863051da2626693c4337cff\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-10T12:57:23Z\\\",\\\"message\\\":\\\"be-controller-manager/kube-controller-manager-crc openshift-ovn-kubernetes/ovnkube-node-m7n89 openshift-dns/node-resolver-zmks6 openshift-etcd/etcd-crc openshift-multus/multus-pqlx4 openshift-network-node-identity/network-node-identity-vrzqb]\\\\nI1210 12:57:21.359087 6291 obj_retry.go:418] Waiting for all the *v1.Pod retry setup to complete in iterateRetryResources\\\\nI1210 12:57:21.359102 6291 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nI1210 12:57:21.359112 6291 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nI1210 12:57:21.359124 6291 ovn.go:134] Ensuring zone local for Pod openshift-network-node-identity/network-node-identity-vrzqb in node crc\\\\nI1210 12:57:21.359130 6291 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb after 0 failed attempt(s)\\\\nI1210 12:57:21.359134 6291 default_network_controller.go:776] Recording success event on pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nI1210 12:57:21.359147 6291 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1210 12:57:21.359215 6291 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:20Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://551dc5f4c39c06d2143805320061efc95d84e870eeecf23b3a64d829653810ed\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-10T12:57:36Z\\\",\\\"message\\\":\\\"1210 12:57:35.655418 6472 ovn.go:134] Ensuring zone local for Pod openshift-ovn-kubernetes/ovnkube-node-m7n89 in node crc\\\\nI1210 12:57:35.655423 6472 obj_retry.go:386] 
Retry successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-node-m7n89 after 0 failed attempt(s)\\\\nI1210 12:57:35.655419 6472 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-console/console\\\\\\\"}\\\\nI1210 12:57:35.655437 6472 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1210 12:57:35.655454 6472 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-api/cluster-autoscaler-operator\\\\\\\"}\\\\nI1210 12:57:35.655469 6472 services_controller.go:360] Finished syncing service cluster-autoscaler-operator on namespace openshift-machine-api for network=default : 3.746055ms\\\\nI1210 12:57:35.655482 6472 services_controller.go:356] Processing sync for service openshift-apiserver-operator/metrics for network=default\\\\nF1210 12:57:35.655502 6472 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd0025f5be6e68aba73c349dd732281dead920b7d8c2d307b4a67cfdafb99119\\\",\\\"image\\\":\\\"quay.io/openshift-re
lease-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34932b230bb26e6c4b1bdf433827ce608df8658f6fb76140a4f0ac680dc1d43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://34932b230bb26e6c4b1bdf433827ce608df8658f6fb76140a4f0ac680dc1d43f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-m7n89\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:36Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:36 crc kubenswrapper[4921]: I1210 12:57:36.821687 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wwrv2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6b3380f-1dd4-45de-9c44-eaa37f965801\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://18de9b421542cbc73b0797f1d6e6e6752b88c3f802e5f2fd16d303de041ac72a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqtvp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49ddf0d56e11ffafc30ec8b0065dd6ef3c3decdbf696e169013572c830f6557c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqtvp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:21Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wwrv2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:36Z is after 2025-08-24T17:21:41Z" Dec 10 
12:57:36 crc kubenswrapper[4921]: I1210 12:57:36.831514 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-j2nnf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9cc656f0-ce36-474b-9fa3-1ce9f43675a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9vnm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9vnm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:22Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-j2nnf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:36Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:36 crc kubenswrapper[4921]: I1210 12:57:36.845017 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ec9bd81-b3fb-41db-acd3-2aff9c4f1c91\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8db8d5b587ec546f8a084fec36cbe7f89aa6998f4bc6dbd1bb9fd22a35f1384e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://efb9127602fc13526bf57fef51bb7814039a1507cee77693ee2723ffc18620bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d0195b43f37c1b874a0daf78d2a91ec39fd64c275503d9f4ec64b74a0d8e423\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://14f71d57ff5277b39ee8d2960b247b98bd9d7ee9993d0fdcb6338c2386b1bc6f\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://14f71d57ff5277b39ee8d2960b247b98bd9d7ee9993d0fdcb6338c2386b1bc6f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:36Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:36 crc kubenswrapper[4921]: I1210 12:57:36.856263 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2789e9cd1bca4abecf0939aad4a5f63bdc250a525ad3664bc2440e8b0b7a834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:36Z is after 
2025-08-24T17:21:41Z" Dec 10 12:57:36 crc kubenswrapper[4921]: I1210 12:57:36.866719 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zmks6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f2626c5-78df-45d2-8970-c4f99790a0fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d43ebe41a779225842dfa1c4d3be01575113b67ada9be07f553df1514e9dcf85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ft9kj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zmks6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:36Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:36 crc kubenswrapper[4921]: I1210 12:57:36.878642 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pqlx4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://480da3b2621712c4562f9423dc98fdbf17a9dc45365f129777611bc7e934c709\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lhs2m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pqlx4\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:36Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:36 crc kubenswrapper[4921]: I1210 12:57:36.884003 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:36 crc kubenswrapper[4921]: I1210 12:57:36.884044 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:36 crc kubenswrapper[4921]: I1210 12:57:36.884070 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:36 crc kubenswrapper[4921]: I1210 12:57:36.884092 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:36 crc kubenswrapper[4921]: I1210 12:57:36.884107 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:36Z","lastTransitionTime":"2025-12-10T12:57:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:36 crc kubenswrapper[4921]: I1210 12:57:36.987351 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:36 crc kubenswrapper[4921]: I1210 12:57:36.987403 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:36 crc kubenswrapper[4921]: I1210 12:57:36.987413 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:36 crc kubenswrapper[4921]: I1210 12:57:36.987431 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:36 crc kubenswrapper[4921]: I1210 12:57:36.987446 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:36Z","lastTransitionTime":"2025-12-10T12:57:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:37 crc kubenswrapper[4921]: I1210 12:57:37.090618 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:37 crc kubenswrapper[4921]: I1210 12:57:37.090656 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:37 crc kubenswrapper[4921]: I1210 12:57:37.090665 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:37 crc kubenswrapper[4921]: I1210 12:57:37.090682 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:37 crc kubenswrapper[4921]: I1210 12:57:37.090694 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:37Z","lastTransitionTime":"2025-12-10T12:57:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:37 crc kubenswrapper[4921]: I1210 12:57:37.193628 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:37 crc kubenswrapper[4921]: I1210 12:57:37.193675 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:37 crc kubenswrapper[4921]: I1210 12:57:37.193686 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:37 crc kubenswrapper[4921]: I1210 12:57:37.193701 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:37 crc kubenswrapper[4921]: I1210 12:57:37.193713 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:37Z","lastTransitionTime":"2025-12-10T12:57:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:37 crc kubenswrapper[4921]: I1210 12:57:37.295624 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:37 crc kubenswrapper[4921]: I1210 12:57:37.295679 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:37 crc kubenswrapper[4921]: I1210 12:57:37.295694 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:37 crc kubenswrapper[4921]: I1210 12:57:37.295714 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:37 crc kubenswrapper[4921]: I1210 12:57:37.295726 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:37Z","lastTransitionTime":"2025-12-10T12:57:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:37 crc kubenswrapper[4921]: I1210 12:57:37.398181 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:37 crc kubenswrapper[4921]: I1210 12:57:37.398223 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:37 crc kubenswrapper[4921]: I1210 12:57:37.398233 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:37 crc kubenswrapper[4921]: I1210 12:57:37.398250 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:37 crc kubenswrapper[4921]: I1210 12:57:37.398261 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:37Z","lastTransitionTime":"2025-12-10T12:57:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:37 crc kubenswrapper[4921]: I1210 12:57:37.501954 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:37 crc kubenswrapper[4921]: I1210 12:57:37.502026 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:37 crc kubenswrapper[4921]: I1210 12:57:37.502043 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:37 crc kubenswrapper[4921]: I1210 12:57:37.502070 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:37 crc kubenswrapper[4921]: I1210 12:57:37.502087 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:37Z","lastTransitionTime":"2025-12-10T12:57:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:37 crc kubenswrapper[4921]: I1210 12:57:37.605598 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:37 crc kubenswrapper[4921]: I1210 12:57:37.605705 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:37 crc kubenswrapper[4921]: I1210 12:57:37.605734 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:37 crc kubenswrapper[4921]: I1210 12:57:37.605771 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:37 crc kubenswrapper[4921]: I1210 12:57:37.606515 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:37Z","lastTransitionTime":"2025-12-10T12:57:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:37 crc kubenswrapper[4921]: I1210 12:57:37.630580 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-m7n89_50684108-04fc-405c-82be-d21d16cd650b/ovnkube-controller/2.log" Dec 10 12:57:37 crc kubenswrapper[4921]: I1210 12:57:37.710457 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:37 crc kubenswrapper[4921]: I1210 12:57:37.710513 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:37 crc kubenswrapper[4921]: I1210 12:57:37.710528 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:37 crc kubenswrapper[4921]: I1210 12:57:37.710551 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:37 crc kubenswrapper[4921]: I1210 12:57:37.710566 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:37Z","lastTransitionTime":"2025-12-10T12:57:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:37 crc kubenswrapper[4921]: I1210 12:57:37.813877 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:37 crc kubenswrapper[4921]: I1210 12:57:37.813946 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:37 crc kubenswrapper[4921]: I1210 12:57:37.813956 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:37 crc kubenswrapper[4921]: I1210 12:57:37.813975 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:37 crc kubenswrapper[4921]: I1210 12:57:37.813986 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:37Z","lastTransitionTime":"2025-12-10T12:57:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:37 crc kubenswrapper[4921]: I1210 12:57:37.917069 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:37 crc kubenswrapper[4921]: I1210 12:57:37.917132 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:37 crc kubenswrapper[4921]: I1210 12:57:37.917152 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:37 crc kubenswrapper[4921]: I1210 12:57:37.917183 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:37 crc kubenswrapper[4921]: I1210 12:57:37.917206 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:37Z","lastTransitionTime":"2025-12-10T12:57:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:38 crc kubenswrapper[4921]: I1210 12:57:38.019704 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:38 crc kubenswrapper[4921]: I1210 12:57:38.019777 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:38 crc kubenswrapper[4921]: I1210 12:57:38.019814 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:38 crc kubenswrapper[4921]: I1210 12:57:38.019848 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:38 crc kubenswrapper[4921]: I1210 12:57:38.019872 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:38Z","lastTransitionTime":"2025-12-10T12:57:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:38 crc kubenswrapper[4921]: I1210 12:57:38.122751 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:38 crc kubenswrapper[4921]: I1210 12:57:38.122816 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:38 crc kubenswrapper[4921]: I1210 12:57:38.122827 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:38 crc kubenswrapper[4921]: I1210 12:57:38.122845 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:38 crc kubenswrapper[4921]: I1210 12:57:38.122856 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:38Z","lastTransitionTime":"2025-12-10T12:57:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:38 crc kubenswrapper[4921]: I1210 12:57:38.192678 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j2nnf" Dec 10 12:57:38 crc kubenswrapper[4921]: I1210 12:57:38.192724 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 12:57:38 crc kubenswrapper[4921]: I1210 12:57:38.192741 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 12:57:38 crc kubenswrapper[4921]: I1210 12:57:38.192801 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 12:57:38 crc kubenswrapper[4921]: E1210 12:57:38.193014 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j2nnf" podUID="9cc656f0-ce36-474b-9fa3-1ce9f43675a4" Dec 10 12:57:38 crc kubenswrapper[4921]: E1210 12:57:38.193203 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 12:57:38 crc kubenswrapper[4921]: E1210 12:57:38.193312 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 12:57:38 crc kubenswrapper[4921]: E1210 12:57:38.193382 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 12:57:38 crc kubenswrapper[4921]: I1210 12:57:38.225980 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:38 crc kubenswrapper[4921]: I1210 12:57:38.226045 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:38 crc kubenswrapper[4921]: I1210 12:57:38.226057 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:38 crc kubenswrapper[4921]: I1210 12:57:38.226078 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:38 crc kubenswrapper[4921]: I1210 12:57:38.226091 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:38Z","lastTransitionTime":"2025-12-10T12:57:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:38 crc kubenswrapper[4921]: I1210 12:57:38.264787 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9cc656f0-ce36-474b-9fa3-1ce9f43675a4-metrics-certs\") pod \"network-metrics-daemon-j2nnf\" (UID: \"9cc656f0-ce36-474b-9fa3-1ce9f43675a4\") " pod="openshift-multus/network-metrics-daemon-j2nnf" Dec 10 12:57:38 crc kubenswrapper[4921]: E1210 12:57:38.265061 4921 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 10 12:57:38 crc kubenswrapper[4921]: E1210 12:57:38.265189 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9cc656f0-ce36-474b-9fa3-1ce9f43675a4-metrics-certs podName:9cc656f0-ce36-474b-9fa3-1ce9f43675a4 nodeName:}" failed. No retries permitted until 2025-12-10 12:57:54.265159483 +0000 UTC m=+71.481381607 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/9cc656f0-ce36-474b-9fa3-1ce9f43675a4-metrics-certs") pod "network-metrics-daemon-j2nnf" (UID: "9cc656f0-ce36-474b-9fa3-1ce9f43675a4") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 10 12:57:38 crc kubenswrapper[4921]: I1210 12:57:38.329485 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:38 crc kubenswrapper[4921]: I1210 12:57:38.329578 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:38 crc kubenswrapper[4921]: I1210 12:57:38.329603 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:38 crc kubenswrapper[4921]: I1210 12:57:38.329631 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:38 crc kubenswrapper[4921]: I1210 12:57:38.329652 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:38Z","lastTransitionTime":"2025-12-10T12:57:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:38 crc kubenswrapper[4921]: I1210 12:57:38.432996 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:38 crc kubenswrapper[4921]: I1210 12:57:38.433055 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:38 crc kubenswrapper[4921]: I1210 12:57:38.433066 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:38 crc kubenswrapper[4921]: I1210 12:57:38.433090 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:38 crc kubenswrapper[4921]: I1210 12:57:38.433113 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:38Z","lastTransitionTime":"2025-12-10T12:57:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:38 crc kubenswrapper[4921]: I1210 12:57:38.536699 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:38 crc kubenswrapper[4921]: I1210 12:57:38.536768 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:38 crc kubenswrapper[4921]: I1210 12:57:38.536794 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:38 crc kubenswrapper[4921]: I1210 12:57:38.536817 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:38 crc kubenswrapper[4921]: I1210 12:57:38.536843 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:38Z","lastTransitionTime":"2025-12-10T12:57:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:38 crc kubenswrapper[4921]: I1210 12:57:38.639140 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:38 crc kubenswrapper[4921]: I1210 12:57:38.639195 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:38 crc kubenswrapper[4921]: I1210 12:57:38.639204 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:38 crc kubenswrapper[4921]: I1210 12:57:38.639222 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:38 crc kubenswrapper[4921]: I1210 12:57:38.639235 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:38Z","lastTransitionTime":"2025-12-10T12:57:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:38 crc kubenswrapper[4921]: I1210 12:57:38.742189 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:38 crc kubenswrapper[4921]: I1210 12:57:38.742356 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:38 crc kubenswrapper[4921]: I1210 12:57:38.742385 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:38 crc kubenswrapper[4921]: I1210 12:57:38.742482 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:38 crc kubenswrapper[4921]: I1210 12:57:38.742504 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:38Z","lastTransitionTime":"2025-12-10T12:57:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:38 crc kubenswrapper[4921]: I1210 12:57:38.846450 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:38 crc kubenswrapper[4921]: I1210 12:57:38.846529 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:38 crc kubenswrapper[4921]: I1210 12:57:38.846549 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:38 crc kubenswrapper[4921]: I1210 12:57:38.846581 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:38 crc kubenswrapper[4921]: I1210 12:57:38.846602 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:38Z","lastTransitionTime":"2025-12-10T12:57:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:38 crc kubenswrapper[4921]: I1210 12:57:38.949664 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:38 crc kubenswrapper[4921]: I1210 12:57:38.949736 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:38 crc kubenswrapper[4921]: I1210 12:57:38.949749 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:38 crc kubenswrapper[4921]: I1210 12:57:38.949769 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:38 crc kubenswrapper[4921]: I1210 12:57:38.949782 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:38Z","lastTransitionTime":"2025-12-10T12:57:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:39 crc kubenswrapper[4921]: I1210 12:57:39.052794 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:39 crc kubenswrapper[4921]: I1210 12:57:39.052845 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:39 crc kubenswrapper[4921]: I1210 12:57:39.052862 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:39 crc kubenswrapper[4921]: I1210 12:57:39.052884 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:39 crc kubenswrapper[4921]: I1210 12:57:39.052900 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:39Z","lastTransitionTime":"2025-12-10T12:57:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:39 crc kubenswrapper[4921]: I1210 12:57:39.155826 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:39 crc kubenswrapper[4921]: I1210 12:57:39.155864 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:39 crc kubenswrapper[4921]: I1210 12:57:39.155889 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:39 crc kubenswrapper[4921]: I1210 12:57:39.155904 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:39 crc kubenswrapper[4921]: I1210 12:57:39.155916 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:39Z","lastTransitionTime":"2025-12-10T12:57:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:39 crc kubenswrapper[4921]: I1210 12:57:39.258610 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:39 crc kubenswrapper[4921]: I1210 12:57:39.258680 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:39 crc kubenswrapper[4921]: I1210 12:57:39.258699 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:39 crc kubenswrapper[4921]: I1210 12:57:39.258727 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:39 crc kubenswrapper[4921]: I1210 12:57:39.258745 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:39Z","lastTransitionTime":"2025-12-10T12:57:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:39 crc kubenswrapper[4921]: I1210 12:57:39.362475 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:39 crc kubenswrapper[4921]: I1210 12:57:39.362539 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:39 crc kubenswrapper[4921]: I1210 12:57:39.362609 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:39 crc kubenswrapper[4921]: I1210 12:57:39.362639 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:39 crc kubenswrapper[4921]: I1210 12:57:39.362660 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:39Z","lastTransitionTime":"2025-12-10T12:57:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:39 crc kubenswrapper[4921]: I1210 12:57:39.466598 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:39 crc kubenswrapper[4921]: I1210 12:57:39.467286 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:39 crc kubenswrapper[4921]: I1210 12:57:39.467377 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:39 crc kubenswrapper[4921]: I1210 12:57:39.467557 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:39 crc kubenswrapper[4921]: I1210 12:57:39.467647 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:39Z","lastTransitionTime":"2025-12-10T12:57:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:39 crc kubenswrapper[4921]: I1210 12:57:39.571234 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:39 crc kubenswrapper[4921]: I1210 12:57:39.571299 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:39 crc kubenswrapper[4921]: I1210 12:57:39.571316 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:39 crc kubenswrapper[4921]: I1210 12:57:39.571344 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:39 crc kubenswrapper[4921]: I1210 12:57:39.571362 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:39Z","lastTransitionTime":"2025-12-10T12:57:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:39 crc kubenswrapper[4921]: I1210 12:57:39.674190 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:39 crc kubenswrapper[4921]: I1210 12:57:39.674506 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:39 crc kubenswrapper[4921]: I1210 12:57:39.674601 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:39 crc kubenswrapper[4921]: I1210 12:57:39.674681 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:39 crc kubenswrapper[4921]: I1210 12:57:39.674745 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:39Z","lastTransitionTime":"2025-12-10T12:57:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:39 crc kubenswrapper[4921]: I1210 12:57:39.777561 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:39 crc kubenswrapper[4921]: I1210 12:57:39.777662 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:39 crc kubenswrapper[4921]: I1210 12:57:39.777689 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:39 crc kubenswrapper[4921]: I1210 12:57:39.777717 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:39 crc kubenswrapper[4921]: I1210 12:57:39.777736 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:39Z","lastTransitionTime":"2025-12-10T12:57:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:39 crc kubenswrapper[4921]: I1210 12:57:39.881652 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:39 crc kubenswrapper[4921]: I1210 12:57:39.881736 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:39 crc kubenswrapper[4921]: I1210 12:57:39.881758 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:39 crc kubenswrapper[4921]: I1210 12:57:39.881786 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:39 crc kubenswrapper[4921]: I1210 12:57:39.881805 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:39Z","lastTransitionTime":"2025-12-10T12:57:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:39 crc kubenswrapper[4921]: I1210 12:57:39.984974 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:39 crc kubenswrapper[4921]: I1210 12:57:39.985027 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:39 crc kubenswrapper[4921]: I1210 12:57:39.985038 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:39 crc kubenswrapper[4921]: I1210 12:57:39.985058 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:39 crc kubenswrapper[4921]: I1210 12:57:39.985069 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:39Z","lastTransitionTime":"2025-12-10T12:57:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:40 crc kubenswrapper[4921]: I1210 12:57:40.088859 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:40 crc kubenswrapper[4921]: I1210 12:57:40.088912 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:40 crc kubenswrapper[4921]: I1210 12:57:40.088930 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:40 crc kubenswrapper[4921]: I1210 12:57:40.088955 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:40 crc kubenswrapper[4921]: I1210 12:57:40.088974 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:40Z","lastTransitionTime":"2025-12-10T12:57:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:40 crc kubenswrapper[4921]: I1210 12:57:40.192820 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:40 crc kubenswrapper[4921]: I1210 12:57:40.192864 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:40 crc kubenswrapper[4921]: I1210 12:57:40.192876 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:40 crc kubenswrapper[4921]: I1210 12:57:40.192894 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:40 crc kubenswrapper[4921]: I1210 12:57:40.192910 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:40Z","lastTransitionTime":"2025-12-10T12:57:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:40 crc kubenswrapper[4921]: I1210 12:57:40.211158 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j2nnf" Dec 10 12:57:40 crc kubenswrapper[4921]: E1210 12:57:40.211492 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j2nnf" podUID="9cc656f0-ce36-474b-9fa3-1ce9f43675a4" Dec 10 12:57:40 crc kubenswrapper[4921]: I1210 12:57:40.211663 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 12:57:40 crc kubenswrapper[4921]: E1210 12:57:40.211934 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 12:57:40 crc kubenswrapper[4921]: I1210 12:57:40.212097 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 12:57:40 crc kubenswrapper[4921]: E1210 12:57:40.212210 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 12:57:40 crc kubenswrapper[4921]: I1210 12:57:40.211178 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 12:57:40 crc kubenswrapper[4921]: E1210 12:57:40.212364 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 12:57:40 crc kubenswrapper[4921]: I1210 12:57:40.296581 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:40 crc kubenswrapper[4921]: I1210 12:57:40.296670 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:40 crc kubenswrapper[4921]: I1210 12:57:40.296692 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:40 crc kubenswrapper[4921]: I1210 12:57:40.296725 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:40 crc kubenswrapper[4921]: I1210 12:57:40.296748 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:40Z","lastTransitionTime":"2025-12-10T12:57:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:40 crc kubenswrapper[4921]: I1210 12:57:40.400154 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:40 crc kubenswrapper[4921]: I1210 12:57:40.400226 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:40 crc kubenswrapper[4921]: I1210 12:57:40.400249 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:40 crc kubenswrapper[4921]: I1210 12:57:40.400276 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:40 crc kubenswrapper[4921]: I1210 12:57:40.400297 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:40Z","lastTransitionTime":"2025-12-10T12:57:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:40 crc kubenswrapper[4921]: I1210 12:57:40.502832 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:40 crc kubenswrapper[4921]: I1210 12:57:40.503811 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:40 crc kubenswrapper[4921]: I1210 12:57:40.504100 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:40 crc kubenswrapper[4921]: I1210 12:57:40.504416 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:40 crc kubenswrapper[4921]: I1210 12:57:40.504643 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:40Z","lastTransitionTime":"2025-12-10T12:57:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:40 crc kubenswrapper[4921]: I1210 12:57:40.608238 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:40 crc kubenswrapper[4921]: I1210 12:57:40.608586 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:40 crc kubenswrapper[4921]: I1210 12:57:40.608600 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:40 crc kubenswrapper[4921]: I1210 12:57:40.608619 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:40 crc kubenswrapper[4921]: I1210 12:57:40.608634 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:40Z","lastTransitionTime":"2025-12-10T12:57:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:40 crc kubenswrapper[4921]: I1210 12:57:40.712038 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:40 crc kubenswrapper[4921]: I1210 12:57:40.712103 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:40 crc kubenswrapper[4921]: I1210 12:57:40.712125 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:40 crc kubenswrapper[4921]: I1210 12:57:40.712154 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:40 crc kubenswrapper[4921]: I1210 12:57:40.712174 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:40Z","lastTransitionTime":"2025-12-10T12:57:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:40 crc kubenswrapper[4921]: I1210 12:57:40.815352 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:40 crc kubenswrapper[4921]: I1210 12:57:40.815972 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:40 crc kubenswrapper[4921]: I1210 12:57:40.816245 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:40 crc kubenswrapper[4921]: I1210 12:57:40.816520 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:40 crc kubenswrapper[4921]: I1210 12:57:40.816759 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:40Z","lastTransitionTime":"2025-12-10T12:57:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:40 crc kubenswrapper[4921]: I1210 12:57:40.920035 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:40 crc kubenswrapper[4921]: I1210 12:57:40.920080 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:40 crc kubenswrapper[4921]: I1210 12:57:40.920091 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:40 crc kubenswrapper[4921]: I1210 12:57:40.920105 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:40 crc kubenswrapper[4921]: I1210 12:57:40.920118 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:40Z","lastTransitionTime":"2025-12-10T12:57:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:41 crc kubenswrapper[4921]: I1210 12:57:41.022782 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:41 crc kubenswrapper[4921]: I1210 12:57:41.023135 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:41 crc kubenswrapper[4921]: I1210 12:57:41.023265 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:41 crc kubenswrapper[4921]: I1210 12:57:41.023428 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:41 crc kubenswrapper[4921]: I1210 12:57:41.023580 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:41Z","lastTransitionTime":"2025-12-10T12:57:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:41 crc kubenswrapper[4921]: I1210 12:57:41.127201 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:41 crc kubenswrapper[4921]: I1210 12:57:41.128014 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:41 crc kubenswrapper[4921]: I1210 12:57:41.128212 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:41 crc kubenswrapper[4921]: I1210 12:57:41.128428 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:41 crc kubenswrapper[4921]: I1210 12:57:41.128624 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:41Z","lastTransitionTime":"2025-12-10T12:57:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:41 crc kubenswrapper[4921]: I1210 12:57:41.231853 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:41 crc kubenswrapper[4921]: I1210 12:57:41.232210 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:41 crc kubenswrapper[4921]: I1210 12:57:41.232310 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:41 crc kubenswrapper[4921]: I1210 12:57:41.232457 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:41 crc kubenswrapper[4921]: I1210 12:57:41.232571 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:41Z","lastTransitionTime":"2025-12-10T12:57:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:41 crc kubenswrapper[4921]: I1210 12:57:41.335227 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:41 crc kubenswrapper[4921]: I1210 12:57:41.335279 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:41 crc kubenswrapper[4921]: I1210 12:57:41.335292 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:41 crc kubenswrapper[4921]: I1210 12:57:41.335309 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:41 crc kubenswrapper[4921]: I1210 12:57:41.335318 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:41Z","lastTransitionTime":"2025-12-10T12:57:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:41 crc kubenswrapper[4921]: I1210 12:57:41.438273 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:41 crc kubenswrapper[4921]: I1210 12:57:41.438324 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:41 crc kubenswrapper[4921]: I1210 12:57:41.438338 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:41 crc kubenswrapper[4921]: I1210 12:57:41.438359 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:41 crc kubenswrapper[4921]: I1210 12:57:41.438374 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:41Z","lastTransitionTime":"2025-12-10T12:57:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:41 crc kubenswrapper[4921]: I1210 12:57:41.541864 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:41 crc kubenswrapper[4921]: I1210 12:57:41.541923 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:41 crc kubenswrapper[4921]: I1210 12:57:41.541935 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:41 crc kubenswrapper[4921]: I1210 12:57:41.541959 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:41 crc kubenswrapper[4921]: I1210 12:57:41.541974 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:41Z","lastTransitionTime":"2025-12-10T12:57:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:41 crc kubenswrapper[4921]: I1210 12:57:41.644479 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:41 crc kubenswrapper[4921]: I1210 12:57:41.644550 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:41 crc kubenswrapper[4921]: I1210 12:57:41.644573 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:41 crc kubenswrapper[4921]: I1210 12:57:41.644600 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:41 crc kubenswrapper[4921]: I1210 12:57:41.644620 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:41Z","lastTransitionTime":"2025-12-10T12:57:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:41 crc kubenswrapper[4921]: I1210 12:57:41.748507 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:41 crc kubenswrapper[4921]: I1210 12:57:41.748576 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:41 crc kubenswrapper[4921]: I1210 12:57:41.748595 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:41 crc kubenswrapper[4921]: I1210 12:57:41.748621 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:41 crc kubenswrapper[4921]: I1210 12:57:41.748640 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:41Z","lastTransitionTime":"2025-12-10T12:57:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:41 crc kubenswrapper[4921]: I1210 12:57:41.852350 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:41 crc kubenswrapper[4921]: I1210 12:57:41.852455 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:41 crc kubenswrapper[4921]: I1210 12:57:41.852474 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:41 crc kubenswrapper[4921]: I1210 12:57:41.852502 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:41 crc kubenswrapper[4921]: I1210 12:57:41.852520 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:41Z","lastTransitionTime":"2025-12-10T12:57:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:41 crc kubenswrapper[4921]: I1210 12:57:41.954918 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:41 crc kubenswrapper[4921]: I1210 12:57:41.955000 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:41 crc kubenswrapper[4921]: I1210 12:57:41.955019 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:41 crc kubenswrapper[4921]: I1210 12:57:41.955050 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:41 crc kubenswrapper[4921]: I1210 12:57:41.955070 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:41Z","lastTransitionTime":"2025-12-10T12:57:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:42 crc kubenswrapper[4921]: I1210 12:57:42.058338 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:42 crc kubenswrapper[4921]: I1210 12:57:42.058557 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:42 crc kubenswrapper[4921]: I1210 12:57:42.058603 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:42 crc kubenswrapper[4921]: I1210 12:57:42.058632 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:42 crc kubenswrapper[4921]: I1210 12:57:42.058655 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:42Z","lastTransitionTime":"2025-12-10T12:57:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:42 crc kubenswrapper[4921]: I1210 12:57:42.161329 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:42 crc kubenswrapper[4921]: I1210 12:57:42.161503 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:42 crc kubenswrapper[4921]: I1210 12:57:42.161545 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:42 crc kubenswrapper[4921]: I1210 12:57:42.161575 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:42 crc kubenswrapper[4921]: I1210 12:57:42.161603 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:42Z","lastTransitionTime":"2025-12-10T12:57:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:42 crc kubenswrapper[4921]: I1210 12:57:42.191846 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j2nnf" Dec 10 12:57:42 crc kubenswrapper[4921]: I1210 12:57:42.191923 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 12:57:42 crc kubenswrapper[4921]: E1210 12:57:42.192035 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j2nnf" podUID="9cc656f0-ce36-474b-9fa3-1ce9f43675a4" Dec 10 12:57:42 crc kubenswrapper[4921]: I1210 12:57:42.191879 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 12:57:42 crc kubenswrapper[4921]: I1210 12:57:42.192108 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 12:57:42 crc kubenswrapper[4921]: E1210 12:57:42.192289 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 12:57:42 crc kubenswrapper[4921]: E1210 12:57:42.192483 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 12:57:42 crc kubenswrapper[4921]: E1210 12:57:42.192618 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 12:57:42 crc kubenswrapper[4921]: I1210 12:57:42.264165 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:42 crc kubenswrapper[4921]: I1210 12:57:42.264211 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:42 crc kubenswrapper[4921]: I1210 12:57:42.264220 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:42 crc kubenswrapper[4921]: I1210 12:57:42.264239 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:42 crc kubenswrapper[4921]: I1210 12:57:42.264250 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:42Z","lastTransitionTime":"2025-12-10T12:57:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:42 crc kubenswrapper[4921]: I1210 12:57:42.367608 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:42 crc kubenswrapper[4921]: I1210 12:57:42.367682 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:42 crc kubenswrapper[4921]: I1210 12:57:42.367704 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:42 crc kubenswrapper[4921]: I1210 12:57:42.367732 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:42 crc kubenswrapper[4921]: I1210 12:57:42.367751 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:42Z","lastTransitionTime":"2025-12-10T12:57:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:42 crc kubenswrapper[4921]: I1210 12:57:42.471285 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:42 crc kubenswrapper[4921]: I1210 12:57:42.471325 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:42 crc kubenswrapper[4921]: I1210 12:57:42.471337 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:42 crc kubenswrapper[4921]: I1210 12:57:42.471354 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:42 crc kubenswrapper[4921]: I1210 12:57:42.471365 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:42Z","lastTransitionTime":"2025-12-10T12:57:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:42 crc kubenswrapper[4921]: I1210 12:57:42.573279 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:42 crc kubenswrapper[4921]: I1210 12:57:42.573323 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:42 crc kubenswrapper[4921]: I1210 12:57:42.573335 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:42 crc kubenswrapper[4921]: I1210 12:57:42.573353 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:42 crc kubenswrapper[4921]: I1210 12:57:42.573367 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:42Z","lastTransitionTime":"2025-12-10T12:57:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:42 crc kubenswrapper[4921]: I1210 12:57:42.637353 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" Dec 10 12:57:42 crc kubenswrapper[4921]: I1210 12:57:42.638364 4921 scope.go:117] "RemoveContainer" containerID="551dc5f4c39c06d2143805320061efc95d84e870eeecf23b3a64d829653810ed" Dec 10 12:57:42 crc kubenswrapper[4921]: E1210 12:57:42.638690 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-m7n89_openshift-ovn-kubernetes(50684108-04fc-405c-82be-d21d16cd650b)\"" pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" podUID="50684108-04fc-405c-82be-d21d16cd650b" Dec 10 12:57:42 crc kubenswrapper[4921]: I1210 12:57:42.656111 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2789e9cd1bca4abecf0939aad4a5f63bdc250a525ad3664bc2440e8b0b7a834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:42Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:42 crc kubenswrapper[4921]: I1210 12:57:42.667164 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zmks6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f2626c5-78df-45d2-8970-c4f99790a0fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d43ebe41a779225842dfa1c4d3be01575113b67ada9be07f553df1514e9dcf85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ft9kj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zmks6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:42Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:42 crc kubenswrapper[4921]: I1210 12:57:42.676625 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:42 crc kubenswrapper[4921]: I1210 12:57:42.676662 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:42 crc kubenswrapper[4921]: I1210 12:57:42.676672 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:42 crc kubenswrapper[4921]: I1210 12:57:42.676691 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:42 crc kubenswrapper[4921]: I1210 12:57:42.676703 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:42Z","lastTransitionTime":"2025-12-10T12:57:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:42 crc kubenswrapper[4921]: I1210 12:57:42.689880 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pqlx4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://480da3b2621712c4562f9423dc98fdbf17a9dc45365f129777611bc7e934c709\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lhs2m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\
\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pqlx4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:42Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:42 crc kubenswrapper[4921]: I1210 12:57:42.726613 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50684108-04fc-405c-82be-d21d16cd650b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8fd269a96475df9dccf2f7bd0ffae831f397f49232f5c22df67903b9b8b8161e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a88b1b9101bc4ab339d394df337e4e11ec8af98b44b621bcb84eed1a0fba3d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"
2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://933c0c81aa0aa2d676a6e404f883a7c81240ef7b07a2e794878c85994d0eb88f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f8f888f214898cb28563da7a77267781622df1f2231c27d1fbdee617ada1ec2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27956424405bdf6223a96b8fd91b5152276a1501c3de2e07dfafc8b3329a6063\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/r
un/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59a0f3962237d723e5aa9044de1ddce3673ae1fb4c9e5e0478cd41daa661f6d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://551dc5f4c39c06d2143805320061efc95d84e870eeecf23b3a64d829653810ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://551dc5f4c39c06d2143805320061efc95d84e870eeecf23b3a64d829653810ed\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-10T12:57:36Z\\\",\\\"message\\\":\\\"1210 12:57:35.655418 6472 ovn.go:134] Ensuring zone local for Pod openshift-ovn-kubernetes/ovnkube-node-m7n89 in node crc\\\\nI1210 12:57:35.655423 6472 obj_retry.go:386] Retry successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-node-m7n89 after 0 failed attempt(s)\\\\nI1210 12:57:35.655419 6472 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-console/console\\\\\\\"}\\\\nI1210 12:57:35.655437 6472 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1210 12:57:35.655454 6472 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-api/cluster-autoscaler-operator\\\\\\\"}\\\\nI1210 12:57:35.655469 6472 services_controller.go:360] Finished syncing service cluster-autoscaler-operator on namespace openshift-machine-api for network=default : 3.746055ms\\\\nI1210 12:57:35.655482 6472 services_controller.go:356] Processing sync for service openshift-apiserver-operator/metrics for network=default\\\\nF1210 12:57:35.655502 6472 ovnkube.go:137] 
failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:34Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-m7n89_openshift-ovn-kubernetes(50684108-04fc-405c-82be-d21d16cd650b)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd0025f5be6e68aba73c349dd732281dead920b7d8c2d307b4a67cfdafb99119\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn
\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34932b230bb26e6c4b1bdf433827ce608df8658f6fb76140a4f0ac680dc1d43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://34932b230bb26e6c4b1bdf433827ce608df8658f6fb76140a4f0ac680dc1d43f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-m7n89\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:42Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:42 crc kubenswrapper[4921]: I1210 12:57:42.739590 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wwrv2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6b3380f-1dd4-45de-9c44-eaa37f965801\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://18de9b421542cbc73b0797f1d6e6e6752b88c3f802e5f2fd16d303de041ac72a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqtvp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49ddf0d56e11ffafc30ec8b0065dd6ef3c3decdbf696e169013572c830f6557c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqtvp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:21Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wwrv2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:42Z is after 2025-08-24T17:21:41Z" Dec 10 
12:57:42 crc kubenswrapper[4921]: I1210 12:57:42.752915 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-j2nnf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9cc656f0-ce36-474b-9fa3-1ce9f43675a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9vnm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9vnm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:22Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-j2nnf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:42Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:42 crc kubenswrapper[4921]: I1210 12:57:42.764437 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ec9bd81-b3fb-41db-acd3-2aff9c4f1c91\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8db8d5b587ec546f8a084fec36cbe7f89aa6998f4bc6dbd1bb9fd22a35f1384e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://efb9127602fc13526bf57fef51bb7814039a1507cee77693ee2723ffc18620bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d0195b43f37c1b874a0daf78d2a91ec39fd64c275503d9f4ec64b74a0d8e423\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://14f71d57ff5277b39ee8d2960b247b98bd9d7ee9993d0fdcb6338c2386b1bc6f\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://14f71d57ff5277b39ee8d2960b247b98bd9d7ee9993d0fdcb6338c2386b1bc6f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:42Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:42 crc kubenswrapper[4921]: I1210 12:57:42.778941 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:42 crc kubenswrapper[4921]: I1210 12:57:42.778982 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:42 crc kubenswrapper[4921]: I1210 12:57:42.778992 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:42 crc kubenswrapper[4921]: I1210 12:57:42.779011 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:42 crc kubenswrapper[4921]: I1210 12:57:42.779024 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:42Z","lastTransitionTime":"2025-12-10T12:57:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:42 crc kubenswrapper[4921]: I1210 12:57:42.782627 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"371fafdc-aa16-4608-aaa2-e419c4ddbc18\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b9a190a657ca03f3fb08626b7af512164ff131b1783b903a02005a111a7036c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57991b0cb6fd4b37082ff5d4eecc6227d77f241e9a983cd3e0eb9db5b485865f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c24d974446ee70bf587bf3969542cda98f062a9cc78b6af73005d9b8d0a6ee02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5a3f231014293fc0412e577cf9840f62f8db869ea4f0f8bef1bfc5112b38cf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://17a6158acd097054719316d2ad29dc036546d3951bb1e8dd010618f9155270a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://534968b5f5d9e7b3063c91a3e0b68ba04d83e2cb65ab688b23d284adc6852155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://534968b5f5d9e7b3063c91a3e0b68ba04d83e2cb65ab688b23d284adc6852155\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0acb3ca5fa3945c89412f466b00193354c94ce56dbba608c104d3baf555a2c3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0acb3ca5fa3945c89412f466b00193354c94ce56dbba608c104d3baf555a2c3a\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b7d1b714acf0f278cc0310204225d417266a241f1ea827dc625f7b89a7d0ebac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b7d1b714acf0f278cc0310204225d417266a241f1ea827dc625f7b89a7d0ebac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:42Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:42 crc kubenswrapper[4921]: I1210 12:57:42.794896 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"02726135-3050-46a1-a3ab-b2ce46cdb75d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12292f0529bcf32fb33e5accfbd0dfd7d53e377a9ee2046d4ca6efc78fe1c31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a4716beddbcd24e8418830aa5494cffffc21272e45e30bd15cfe58bfc07c543\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f66fe2144cde40619405c04d7d83cbcc2e78503401df428502abad1682d4cb7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4244835c7f038a7c1bf4820de49854350a23fac13c5a252a1553f6508594f10e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:42Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:42 crc kubenswrapper[4921]: I1210 12:57:42.808707 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:42Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:42 crc kubenswrapper[4921]: I1210 12:57:42.820976 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://307b845aae3352df08e2f9fd394f4110a37b2a21650593ebb584c5bf37d01397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3be8a498516e12174c8b5612669fd69deef610c01ed9884a5228cd436bbae3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:42Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:42 crc kubenswrapper[4921]: I1210 12:57:42.835001 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-86bpd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"709b4982-f2e6-4692-ab1a-c1d5b7d507ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c842f48ca574d23a086e1b248c17102895f4f45897ac87ddcc1f98f170a22bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34d9e720fab0818e4cdf1e2a4da042a5648c7c396fedf17b395ad07ececd5c9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://34d9e720fab0818e4cdf1e2a4da042a5648c7c396fedf17b395ad07ececd5c9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"start
edAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04723fc3840c9d632dae527a5afa04fc7eea858426056da3dfe8e72186198ab1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://04723fc3840c9d632dae527a5afa04fc7eea858426056da3dfe8e72186198ab1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://adf25ba213f519cad3c21233c0f3d2a383d978543da8ea1db41bb60dd29f9f3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://adf25ba213f519cad3c21233c0f3d2a383d978543da8ea1db41bb60dd29f9f3e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://baa63608618bc4f059414317df70f14a33321d5aed291adc02a9daac92cf5428\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\"
:\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baa63608618bc4f059414317df70f14a33321d5aed291adc02a9daac92cf5428\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e54a218f367591b87841a39399d5889344b8b92fcc70d77105a0191d3dba37c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e54a218f367591b87841a39399d5889344b8b92fcc70d77105a0191d3dba37c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d90adbff8edcd85eebe4858e412769dff7a05b05bbe7fc533906b55e6ee415e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d90adbff8edcd85eebe4858e412769dff7a05b05bbe7fc533906b55e6ee415e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.1
68.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-86bpd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:42Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:42 crc kubenswrapper[4921]: I1210 12:57:42.849248 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:42Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:42 crc kubenswrapper[4921]: I1210 12:57:42.866360 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:42Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:42 crc kubenswrapper[4921]: I1210 12:57:42.882159 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"354355f7-6630-49a8-bdc5-5e875feecb7f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22c45fd7d4d0bb91e995e76a0d813660f9b488a4765e3a21eab2485e1ff03ff3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dbm9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27975eaa70887a1e6ec3bc21ce170bbe5dfe5a05172264be8c8bd343aea02998\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dbm9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-vn2n6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:42Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:42 crc kubenswrapper[4921]: I1210 12:57:42.882242 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:42 crc kubenswrapper[4921]: I1210 12:57:42.882263 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:42 crc kubenswrapper[4921]: I1210 12:57:42.882272 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:42 crc kubenswrapper[4921]: I1210 12:57:42.882288 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:42 crc kubenswrapper[4921]: I1210 12:57:42.882313 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:42Z","lastTransitionTime":"2025-12-10T12:57:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:42 crc kubenswrapper[4921]: I1210 12:57:42.897751 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f57208b0-80bc-4c1b-bbab-9d2f858972f6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0534394a39803e8a7555e29d0770b5ac7f9197a5f0e03bec4c5460d77fffdd14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6eaca0cb438e61f0856ed7dc64256ccd02aee8dac014d1f5e9cd8aa180c736fb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://692a4c4828dc74b1bfb948f58fab96ee6674030cb9009c72f30f9eae482eb682\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f534d6390920d177e185001b28f7ece42d82a0da922b4aaf174c271dbe975c50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b39874b20cdccc7903753342421a1f7e13b7e99a2cb699a7c0e44226aebd4f4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"message\\\":\\\"et denominator for mutating requests\\\\\\\" limit=200\\\\nI1210 12:57:01.294872 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1210 12:57:01.294893 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1210 12:57:01.294918 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 12:57:01.294926 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 12:57:01.294932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1210 12:57:01.294934 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1210 12:57:01.294938 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1210 12:57:01.294941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1210 12:57:01.301734 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2446357718/tls.crt::/tmp/serving-cert-2446357718/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1765371405\\\\\\\\\\\\\\\" (2025-12-10 12:56:44 +0000 UTC to 2026-01-09 12:56:45 +0000 UTC (now=2025-12-10 12:57:01.30169166 +0000 UTC))\\\\\\\"\\\\nI1210 12:57:01.301889 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1765371416\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1765371416\\\\\\\\\\\\\\\" (2025-12-10 11:56:55 +0000 UTC to 2026-12-10 11:56:55 +0000 UTC (now=2025-12-10 12:57:01.301865574 +0000 UTC))\\\\\\\"\\\\nI1210 12:57:01.301907 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1210 12:57:01.301934 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nF1210 12:57:01.302850 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e5afbcb1ea81c3f9ec4152ef614a3f07ba1ded75c774c467e968f9c3ee72e33\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bee74fc4c681cc10c5a460c807659272e393e19173109e82ef65371c5b363ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bee74fc4c681cc10c5a460c807659272e393e19173109e82ef65371c5b363ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:42Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:42 crc kubenswrapper[4921]: I1210 12:57:42.912330 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jskgz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ad003cc-9fcc-4fc6-86b9-247b30013c0a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8c0dc3ea5672198c430f12ce59b7f2a66100fe52e0f7b4552deba97144250d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m875h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:11Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jskgz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:42Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:42 crc kubenswrapper[4921]: I1210 12:57:42.925961 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bd245e67c99943297f64701eba8772143dc206caf67849eaf2f9a8e82dab0d26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:42Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:42 crc kubenswrapper[4921]: I1210 12:57:42.985620 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:42 crc kubenswrapper[4921]: I1210 12:57:42.985680 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:42 crc kubenswrapper[4921]: I1210 12:57:42.985694 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:42 crc kubenswrapper[4921]: I1210 12:57:42.985716 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:42 crc kubenswrapper[4921]: I1210 12:57:42.985734 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:42Z","lastTransitionTime":"2025-12-10T12:57:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:43 crc kubenswrapper[4921]: I1210 12:57:43.089479 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:43 crc kubenswrapper[4921]: I1210 12:57:43.089547 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:43 crc kubenswrapper[4921]: I1210 12:57:43.089567 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:43 crc kubenswrapper[4921]: I1210 12:57:43.089592 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:43 crc kubenswrapper[4921]: I1210 12:57:43.089610 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:43Z","lastTransitionTime":"2025-12-10T12:57:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:43 crc kubenswrapper[4921]: I1210 12:57:43.192550 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:43 crc kubenswrapper[4921]: I1210 12:57:43.192623 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:43 crc kubenswrapper[4921]: I1210 12:57:43.192642 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:43 crc kubenswrapper[4921]: I1210 12:57:43.192670 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:43 crc kubenswrapper[4921]: I1210 12:57:43.192686 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:43Z","lastTransitionTime":"2025-12-10T12:57:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:43 crc kubenswrapper[4921]: I1210 12:57:43.218183 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f57208b0-80bc-4c1b-bbab-9d2f858972f6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0534394a39803e8a7555e29d0770b5ac7f9197a5f0e03bec4c5460d77fffdd14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6eaca0cb438e61f0856ed7dc64256ccd02aee8dac014d1f5e9cd8aa180c736fb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://692a4c4828dc74b1bfb948f58fab96ee6674030cb9009c72f30f9eae482eb682\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f534d6390920d177e185001b28f7ece42d82a0da922b4aaf174c271dbe975c50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b39874b20cdccc7903753342421a1f7e13b7e99a2cb699a7c0e44226aebd4f4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"message\\\":\\\"et denominator for mutating requests\\\\\\\" limit=200\\\\nI1210 12:57:01.294872 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1210 12:57:01.294893 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1210 12:57:01.294918 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 12:57:01.294926 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 12:57:01.294932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1210 12:57:01.294934 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1210 12:57:01.294938 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1210 12:57:01.294941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1210 12:57:01.301734 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2446357718/tls.crt::/tmp/serving-cert-2446357718/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1765371405\\\\\\\\\\\\\\\" (2025-12-10 12:56:44 +0000 UTC to 2026-01-09 12:56:45 +0000 UTC (now=2025-12-10 12:57:01.30169166 +0000 UTC))\\\\\\\"\\\\nI1210 12:57:01.301889 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1765371416\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1765371416\\\\\\\\\\\\\\\" (2025-12-10 11:56:55 +0000 UTC to 2026-12-10 11:56:55 +0000 UTC (now=2025-12-10 12:57:01.301865574 +0000 UTC))\\\\\\\"\\\\nI1210 12:57:01.301907 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1210 12:57:01.301934 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nF1210 12:57:01.302850 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e5afbcb1ea81c3f9ec4152ef614a3f07ba1ded75c774c467e968f9c3ee72e33\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bee74fc4c681cc10c5a460c807659272e393e19173109e82ef65371c5b363ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bee74fc4c681cc10c5a460c807659272e393e19173109e82ef65371c5b363ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:43Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:43 crc kubenswrapper[4921]: I1210 12:57:43.238307 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:43Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:43 crc kubenswrapper[4921]: I1210 12:57:43.254221 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:43Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:43 crc kubenswrapper[4921]: I1210 12:57:43.271864 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"354355f7-6630-49a8-bdc5-5e875feecb7f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22c45fd7d4d0bb91e995e76a0d813660f9b488a4765e3a21eab2485e1ff03ff3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dbm9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27975eaa70887a1e6ec3bc21ce170bbe5dfe5a05172264be8c8bd343aea02998\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dbm9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-vn2n6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:43Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:43 crc kubenswrapper[4921]: I1210 12:57:43.294807 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bd245e67c99943297f64701eba8772143dc206caf67849eaf2f9a8e82dab0d26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:43Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:43 crc kubenswrapper[4921]: I1210 12:57:43.296417 4921 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:43 crc kubenswrapper[4921]: I1210 12:57:43.296458 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:43 crc kubenswrapper[4921]: I1210 12:57:43.296469 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:43 crc kubenswrapper[4921]: I1210 12:57:43.296488 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:43 crc kubenswrapper[4921]: I1210 12:57:43.296502 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:43Z","lastTransitionTime":"2025-12-10T12:57:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:43 crc kubenswrapper[4921]: I1210 12:57:43.311983 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jskgz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ad003cc-9fcc-4fc6-86b9-247b30013c0a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8c0dc3ea5672198c430f12ce59b7f2a66100fe52e0f7b4552deba97144250d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m875h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:11Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jskgz\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:43Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:43 crc kubenswrapper[4921]: I1210 12:57:43.330932 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ec9bd81-b3fb-41db-acd3-2aff9c4f1c91\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8db8d5b587ec546f8a084fec36cbe7f89aa6998f4bc6dbd1bb9fd22a35f1384e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://efb9127602fc13526bf57fef51bb7814039a1507cee77693ee2723ffc18620bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d0195b43f37c1b874a0daf78d2a91ec39fd64c275503d9f4ec64b74a0d8e423\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{
\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://14f71d57ff5277b39ee8d2960b247b98bd9d7ee9993d0fdcb6338c2386b1bc6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://14f71d57ff5277b39ee8d2960b247b98bd9d7ee9993d0fdcb6338c2386b1bc6f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:43Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:43 crc kubenswrapper[4921]: I1210 12:57:43.352527 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2789e9cd1bca4abecf0939aad4a5f63bdc250a525ad3664bc2440e8b0b7a834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:43Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:43 crc kubenswrapper[4921]: I1210 12:57:43.367939 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zmks6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f2626c5-78df-45d2-8970-c4f99790a0fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d43ebe41a779225842dfa1c4d3be01575113b67ada9be07f553df1514e9dcf85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ft9kj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zmks6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:43Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:43 crc kubenswrapper[4921]: I1210 12:57:43.386199 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pqlx4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://480da3b2621712c4562f9423dc98fdbf17a9dc45365f129777611bc7e934c709\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lhs2m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pqlx4\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:43Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:43 crc kubenswrapper[4921]: I1210 12:57:43.399497 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:43 crc kubenswrapper[4921]: I1210 12:57:43.399543 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:43 crc kubenswrapper[4921]: I1210 12:57:43.399556 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:43 crc kubenswrapper[4921]: I1210 12:57:43.399577 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:43 crc kubenswrapper[4921]: I1210 12:57:43.399593 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:43Z","lastTransitionTime":"2025-12-10T12:57:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:43 crc kubenswrapper[4921]: I1210 12:57:43.417597 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50684108-04fc-405c-82be-d21d16cd650b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8fd269a96475df9dccf2f7bd0ffae831f397f49232f5c22df67903b9b8b8161e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a88b1b9101bc4ab339d394df337e4e11ec8af98b44b621bcb84eed1a0fba3d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://933c0c81aa0aa2d676a6e404f883a7c81240ef7b07a2e794878c85994d0eb88f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f8f888f214898cb28563da7a77267781622df1f2231c27d1fbdee617ada1ec2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27956424405bdf6223a96b8fd91b5152276a1501c3de2e07dfafc8b3329a6063\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59a0f3962237d723e5aa9044de1ddce3673ae1fb4c9e5e0478cd41daa661f6d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://551dc5f4c39c06d2143805320061efc95d84e870
eeecf23b3a64d829653810ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://551dc5f4c39c06d2143805320061efc95d84e870eeecf23b3a64d829653810ed\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-10T12:57:36Z\\\",\\\"message\\\":\\\"1210 12:57:35.655418 6472 ovn.go:134] Ensuring zone local for Pod openshift-ovn-kubernetes/ovnkube-node-m7n89 in node crc\\\\nI1210 12:57:35.655423 6472 obj_retry.go:386] Retry successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-node-m7n89 after 0 failed attempt(s)\\\\nI1210 12:57:35.655419 6472 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-console/console\\\\\\\"}\\\\nI1210 12:57:35.655437 6472 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1210 12:57:35.655454 6472 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-api/cluster-autoscaler-operator\\\\\\\"}\\\\nI1210 12:57:35.655469 6472 services_controller.go:360] Finished syncing service cluster-autoscaler-operator on namespace openshift-machine-api for network=default : 3.746055ms\\\\nI1210 12:57:35.655482 6472 services_controller.go:356] Processing sync for service openshift-apiserver-operator/metrics for network=default\\\\nF1210 12:57:35.655502 6472 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:34Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-m7n89_openshift-ovn-kubernetes(50684108-04fc-405c-82be-d21d16cd650b)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd0025f5be6e68aba73c349dd732281dead920b7d8c2d307b4a67cfdafb99119\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34932b230bb26e6c4b1bdf433827ce608df8658f6fb76140a4f0ac680dc1d43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://34932b230bb26e6c4b1bdf433827ce608df8658f6fb76140a4f0ac680dc1d43f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-m7n89\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:43Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:43 crc kubenswrapper[4921]: I1210 12:57:43.437788 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wwrv2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6b3380f-1dd4-45de-9c44-eaa37f965801\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://18de9b421542cbc73b0797f1d6e6e6752b88c3f802e5f2fd16d303de041ac72a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqtvp
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49ddf0d56e11ffafc30ec8b0065dd6ef3c3decdbf696e169013572c830f6557c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqtvp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:21Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wwrv2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:43Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:43 crc kubenswrapper[4921]: I1210 12:57:43.454416 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-j2nnf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9cc656f0-ce36-474b-9fa3-1ce9f43675a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9vnm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9vnm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:22Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-j2nnf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:43Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:43 crc kubenswrapper[4921]: I1210 12:57:43.480891 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"371fafdc-aa16-4608-aaa2-e419c4ddbc18\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b9a190a657ca03f3fb08626b7af512164ff131b1783b903a02005a111a7036c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57991b0cb6fd4b37082ff5d4eecc6227d77f241e9a983cd3e0eb9db5b485865f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c24d974446ee70bf587bf3969542cda98f062a9cc78b6af73005d9b8d0a6ee02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5a3f231014293fc0412e577cf9840f62f8db86
9ea4f0f8bef1bfc5112b38cf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://17a6158acd097054719316d2ad29dc036546d3951bb1e8dd010618f9155270a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://534968b5f5d9e7b3063c91a3e0b68ba04d83e2cb65ab688b23d284adc6852155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://534968b5f5d9e7b3063c91a3e0b68ba04d83e2cb65ab688b23d284adc6852155\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0acb3ca5fa3945c89412f466b00193354c94ce56dbba608c104d3baf555a2c3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0acb3ca5fa3945c89412f466b00193354c94ce56dbba608c104d3baf555a2c3a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b7d1b714acf0f278cc0310204225d417266a241f1ea827dc625f7b89a7d0ebac\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b7d1b714acf0f278cc0310204225d417266a241f1ea827dc625f7b89a7d0ebac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:43Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:43 crc kubenswrapper[4921]: I1210 12:57:43.502589 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:43 crc kubenswrapper[4921]: I1210 12:57:43.502632 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:43 crc kubenswrapper[4921]: I1210 12:57:43.502645 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:43 crc kubenswrapper[4921]: I1210 12:57:43.502665 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:43 crc kubenswrapper[4921]: I1210 12:57:43.502678 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:43Z","lastTransitionTime":"2025-12-10T12:57:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:43 crc kubenswrapper[4921]: I1210 12:57:43.505715 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02726135-3050-46a1-a3ab-b2ce46cdb75d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12292f0529bcf32fb33e5accfbd0dfd7d53e377a9ee2046d4ca6efc78fe1c31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a4716beddbcd24e8418830aa5494cffffc21272e45e30bd15cfe58bfc07c543\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f66fe2144cde40619405c04d7d83cbcc2e78503401df428502abad1682d4cb7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4244835c7f038a7c1bf4820de49854350a23fac13c5a252a1553f6508594f10e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:43Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:43 crc kubenswrapper[4921]: I1210 12:57:43.522049 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:43Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:43 crc kubenswrapper[4921]: I1210 12:57:43.538181 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://307b845aae3352df08e2f9fd394f4110a37b2a21650593ebb584c5bf37d01397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3be8a498516e12174c8b5612669fd69deef610c01ed9884a5228cd436bbae3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:43Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:43 crc kubenswrapper[4921]: I1210 12:57:43.553585 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-86bpd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"709b4982-f2e6-4692-ab1a-c1d5b7d507ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c842f48ca574d23a086e1b248c17102895f4f45897ac87ddcc1f98f170a22bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34d9e720fab0818e4cdf1e2a4da042a5648c7c396fedf17b395ad07ececd5c9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://34d9e720fab0818e4cdf1e2a4da042a5648c7c396fedf17b395ad07ececd5c9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"start
edAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04723fc3840c9d632dae527a5afa04fc7eea858426056da3dfe8e72186198ab1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://04723fc3840c9d632dae527a5afa04fc7eea858426056da3dfe8e72186198ab1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://adf25ba213f519cad3c21233c0f3d2a383d978543da8ea1db41bb60dd29f9f3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://adf25ba213f519cad3c21233c0f3d2a383d978543da8ea1db41bb60dd29f9f3e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://baa63608618bc4f059414317df70f14a33321d5aed291adc02a9daac92cf5428\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\"
:\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baa63608618bc4f059414317df70f14a33321d5aed291adc02a9daac92cf5428\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e54a218f367591b87841a39399d5889344b8b92fcc70d77105a0191d3dba37c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e54a218f367591b87841a39399d5889344b8b92fcc70d77105a0191d3dba37c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d90adbff8edcd85eebe4858e412769dff7a05b05bbe7fc533906b55e6ee415e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d90adbff8edcd85eebe4858e412769dff7a05b05bbe7fc533906b55e6ee415e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.1
68.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-86bpd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:43Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:43 crc kubenswrapper[4921]: I1210 12:57:43.605608 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:43 crc kubenswrapper[4921]: I1210 12:57:43.605648 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:43 crc kubenswrapper[4921]: I1210 12:57:43.605656 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:43 crc kubenswrapper[4921]: I1210 12:57:43.605670 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:43 crc kubenswrapper[4921]: I1210 12:57:43.605679 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:43Z","lastTransitionTime":"2025-12-10T12:57:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:43 crc kubenswrapper[4921]: I1210 12:57:43.707992 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:43 crc kubenswrapper[4921]: I1210 12:57:43.708035 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:43 crc kubenswrapper[4921]: I1210 12:57:43.708046 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:43 crc kubenswrapper[4921]: I1210 12:57:43.708062 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:43 crc kubenswrapper[4921]: I1210 12:57:43.708072 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:43Z","lastTransitionTime":"2025-12-10T12:57:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 10 12:57:43 crc kubenswrapper[4921]: I1210 12:57:43.811988 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 12:57:43 crc kubenswrapper[4921]: I1210 12:57:43.812127 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 12:57:43 crc kubenswrapper[4921]: I1210 12:57:43.812147 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 12:57:43 crc kubenswrapper[4921]: I1210 12:57:43.812208 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 12:57:43 crc kubenswrapper[4921]: I1210 12:57:43.812227 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:43Z","lastTransitionTime":"2025-12-10T12:57:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 12:57:43 crc kubenswrapper[4921]: I1210 12:57:43.915700 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 12:57:43 crc kubenswrapper[4921]: I1210 12:57:43.915793 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 12:57:43 crc kubenswrapper[4921]: I1210 12:57:43.915812 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 12:57:43 crc kubenswrapper[4921]: I1210 12:57:43.915840 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 12:57:43 crc kubenswrapper[4921]: I1210 12:57:43.915859 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:43Z","lastTransitionTime":"2025-12-10T12:57:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 12:57:44 crc kubenswrapper[4921]: I1210 12:57:44.024340 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 12:57:44 crc kubenswrapper[4921]: I1210 12:57:44.024379 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 12:57:44 crc kubenswrapper[4921]: I1210 12:57:44.024408 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 12:57:44 crc kubenswrapper[4921]: I1210 12:57:44.024425 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 12:57:44 crc kubenswrapper[4921]: I1210 12:57:44.024437 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:44Z","lastTransitionTime":"2025-12-10T12:57:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 12:57:44 crc kubenswrapper[4921]: I1210 12:57:44.127552 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 12:57:44 crc kubenswrapper[4921]: I1210 12:57:44.127609 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 12:57:44 crc kubenswrapper[4921]: I1210 12:57:44.127625 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 12:57:44 crc kubenswrapper[4921]: I1210 12:57:44.127649 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 12:57:44 crc kubenswrapper[4921]: I1210 12:57:44.127669 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:44Z","lastTransitionTime":"2025-12-10T12:57:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 12:57:44 crc kubenswrapper[4921]: I1210 12:57:44.192482 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 10 12:57:44 crc kubenswrapper[4921]: I1210 12:57:44.192585 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j2nnf"
Dec 10 12:57:44 crc kubenswrapper[4921]: I1210 12:57:44.192622 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 10 12:57:44 crc kubenswrapper[4921]: E1210 12:57:44.192669 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 10 12:57:44 crc kubenswrapper[4921]: I1210 12:57:44.192586 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 10 12:57:44 crc kubenswrapper[4921]: E1210 12:57:44.192802 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j2nnf" podUID="9cc656f0-ce36-474b-9fa3-1ce9f43675a4"
Dec 10 12:57:44 crc kubenswrapper[4921]: E1210 12:57:44.192907 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 10 12:57:44 crc kubenswrapper[4921]: E1210 12:57:44.193020 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 10 12:57:44 crc kubenswrapper[4921]: I1210 12:57:44.230356 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 12:57:44 crc kubenswrapper[4921]: I1210 12:57:44.230428 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 12:57:44 crc kubenswrapper[4921]: I1210 12:57:44.230470 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 12:57:44 crc kubenswrapper[4921]: I1210 12:57:44.230491 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 12:57:44 crc kubenswrapper[4921]: I1210 12:57:44.230503 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:44Z","lastTransitionTime":"2025-12-10T12:57:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 12:57:44 crc kubenswrapper[4921]: I1210 12:57:44.333276 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 12:57:44 crc kubenswrapper[4921]: I1210 12:57:44.333381 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 12:57:44 crc kubenswrapper[4921]: I1210 12:57:44.333419 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 12:57:44 crc kubenswrapper[4921]: I1210 12:57:44.333441 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 12:57:44 crc kubenswrapper[4921]: I1210 12:57:44.333455 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:44Z","lastTransitionTime":"2025-12-10T12:57:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 12:57:44 crc kubenswrapper[4921]: I1210 12:57:44.435510 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 12:57:44 crc kubenswrapper[4921]: I1210 12:57:44.435557 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 12:57:44 crc kubenswrapper[4921]: I1210 12:57:44.435567 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 12:57:44 crc kubenswrapper[4921]: I1210 12:57:44.435584 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 12:57:44 crc kubenswrapper[4921]: I1210 12:57:44.435597 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:44Z","lastTransitionTime":"2025-12-10T12:57:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 12:57:44 crc kubenswrapper[4921]: I1210 12:57:44.539051 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 12:57:44 crc kubenswrapper[4921]: I1210 12:57:44.539103 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 12:57:44 crc kubenswrapper[4921]: I1210 12:57:44.539113 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 12:57:44 crc kubenswrapper[4921]: I1210 12:57:44.539130 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 12:57:44 crc kubenswrapper[4921]: I1210 12:57:44.539142 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:44Z","lastTransitionTime":"2025-12-10T12:57:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 12:57:44 crc kubenswrapper[4921]: I1210 12:57:44.642008 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 12:57:44 crc kubenswrapper[4921]: I1210 12:57:44.642045 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 12:57:44 crc kubenswrapper[4921]: I1210 12:57:44.642054 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 12:57:44 crc kubenswrapper[4921]: I1210 12:57:44.642069 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 12:57:44 crc kubenswrapper[4921]: I1210 12:57:44.642080 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:44Z","lastTransitionTime":"2025-12-10T12:57:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 12:57:44 crc kubenswrapper[4921]: I1210 12:57:44.744070 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 12:57:44 crc kubenswrapper[4921]: I1210 12:57:44.744112 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 12:57:44 crc kubenswrapper[4921]: I1210 12:57:44.744125 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 12:57:44 crc kubenswrapper[4921]: I1210 12:57:44.744141 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 12:57:44 crc kubenswrapper[4921]: I1210 12:57:44.744155 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:44Z","lastTransitionTime":"2025-12-10T12:57:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 12:57:44 crc kubenswrapper[4921]: I1210 12:57:44.846530 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 12:57:44 crc kubenswrapper[4921]: I1210 12:57:44.846576 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 12:57:44 crc kubenswrapper[4921]: I1210 12:57:44.846584 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 12:57:44 crc kubenswrapper[4921]: I1210 12:57:44.846602 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 12:57:44 crc kubenswrapper[4921]: I1210 12:57:44.846613 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:44Z","lastTransitionTime":"2025-12-10T12:57:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 12:57:44 crc kubenswrapper[4921]: I1210 12:57:44.949840 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 12:57:44 crc kubenswrapper[4921]: I1210 12:57:44.949909 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 12:57:44 crc kubenswrapper[4921]: I1210 12:57:44.949923 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 12:57:44 crc kubenswrapper[4921]: I1210 12:57:44.949945 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 12:57:44 crc kubenswrapper[4921]: I1210 12:57:44.949959 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:44Z","lastTransitionTime":"2025-12-10T12:57:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 12:57:45 crc kubenswrapper[4921]: I1210 12:57:45.053372 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 12:57:45 crc kubenswrapper[4921]: I1210 12:57:45.053456 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 12:57:45 crc kubenswrapper[4921]: I1210 12:57:45.053468 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 12:57:45 crc kubenswrapper[4921]: I1210 12:57:45.053520 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 12:57:45 crc kubenswrapper[4921]: I1210 12:57:45.053537 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:45Z","lastTransitionTime":"2025-12-10T12:57:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 12:57:45 crc kubenswrapper[4921]: I1210 12:57:45.163579 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 12:57:45 crc kubenswrapper[4921]: I1210 12:57:45.163636 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 12:57:45 crc kubenswrapper[4921]: I1210 12:57:45.163647 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 12:57:45 crc kubenswrapper[4921]: I1210 12:57:45.163667 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 12:57:45 crc kubenswrapper[4921]: I1210 12:57:45.163680 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:45Z","lastTransitionTime":"2025-12-10T12:57:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 12:57:45 crc kubenswrapper[4921]: I1210 12:57:45.265989 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 12:57:45 crc kubenswrapper[4921]: I1210 12:57:45.266034 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 12:57:45 crc kubenswrapper[4921]: I1210 12:57:45.266047 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 12:57:45 crc kubenswrapper[4921]: I1210 12:57:45.266065 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 12:57:45 crc kubenswrapper[4921]: I1210 12:57:45.266079 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:45Z","lastTransitionTime":"2025-12-10T12:57:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 12:57:45 crc kubenswrapper[4921]: I1210 12:57:45.369230 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 12:57:45 crc kubenswrapper[4921]: I1210 12:57:45.369276 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 12:57:45 crc kubenswrapper[4921]: I1210 12:57:45.369288 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 12:57:45 crc kubenswrapper[4921]: I1210 12:57:45.369304 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 12:57:45 crc kubenswrapper[4921]: I1210 12:57:45.369315 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:45Z","lastTransitionTime":"2025-12-10T12:57:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 12:57:45 crc kubenswrapper[4921]: I1210 12:57:45.473275 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 12:57:45 crc kubenswrapper[4921]: I1210 12:57:45.473308 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 12:57:45 crc kubenswrapper[4921]: I1210 12:57:45.473316 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 12:57:45 crc kubenswrapper[4921]: I1210 12:57:45.473331 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 12:57:45 crc kubenswrapper[4921]: I1210 12:57:45.473341 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:45Z","lastTransitionTime":"2025-12-10T12:57:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 12:57:45 crc kubenswrapper[4921]: I1210 12:57:45.576583 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 12:57:45 crc kubenswrapper[4921]: I1210 12:57:45.576616 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 12:57:45 crc kubenswrapper[4921]: I1210 12:57:45.576626 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 12:57:45 crc kubenswrapper[4921]: I1210 12:57:45.576643 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 12:57:45 crc kubenswrapper[4921]: I1210 12:57:45.576654 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:45Z","lastTransitionTime":"2025-12-10T12:57:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 12:57:45 crc kubenswrapper[4921]: I1210 12:57:45.643253 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 12:57:45 crc kubenswrapper[4921]: I1210 12:57:45.643288 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 12:57:45 crc kubenswrapper[4921]: I1210 12:57:45.643297 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 12:57:45 crc kubenswrapper[4921]: I1210 12:57:45.643313 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 12:57:45 crc kubenswrapper[4921]: I1210 12:57:45.643325 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:45Z","lastTransitionTime":"2025-12-10T12:57:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 12:57:45 crc kubenswrapper[4921]: E1210 12:57:45.656268 4921 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:45Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:45Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"aa6d129a-c0be-471d-913f-2184d68fb040\\\",\\\"systemUUID\\\":\\\"539c9d38-f260-4af7-b6c3-f4170bf93c3e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:45Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:45 crc kubenswrapper[4921]: I1210 12:57:45.659486 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:45 crc kubenswrapper[4921]: I1210 12:57:45.659520 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 10 12:57:45 crc kubenswrapper[4921]: I1210 12:57:45.659531 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:45 crc kubenswrapper[4921]: I1210 12:57:45.659550 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:45 crc kubenswrapper[4921]: I1210 12:57:45.659567 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:45Z","lastTransitionTime":"2025-12-10T12:57:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:45 crc kubenswrapper[4921]: E1210 12:57:45.672435 4921 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:45Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:45Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"aa6d129a-c0be-471d-913f-2184d68fb040\\\",\\\"systemUUID\\\":\\\"539c9d38-f260-4af7-b6c3-f4170bf93c3e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:45Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:45 crc kubenswrapper[4921]: I1210 12:57:45.676478 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:45 crc kubenswrapper[4921]: I1210 12:57:45.676530 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 10 12:57:45 crc kubenswrapper[4921]: I1210 12:57:45.676540 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:45 crc kubenswrapper[4921]: I1210 12:57:45.676559 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:45 crc kubenswrapper[4921]: I1210 12:57:45.676571 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:45Z","lastTransitionTime":"2025-12-10T12:57:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:45 crc kubenswrapper[4921]: E1210 12:57:45.689448 4921 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:45Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:45Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"aa6d129a-c0be-471d-913f-2184d68fb040\\\",\\\"systemUUID\\\":\\\"539c9d38-f260-4af7-b6c3-f4170bf93c3e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:45Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:45 crc kubenswrapper[4921]: I1210 12:57:45.693249 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:45 crc kubenswrapper[4921]: I1210 12:57:45.693286 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 10 12:57:45 crc kubenswrapper[4921]: I1210 12:57:45.693297 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:45 crc kubenswrapper[4921]: I1210 12:57:45.693315 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:45 crc kubenswrapper[4921]: I1210 12:57:45.693327 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:45Z","lastTransitionTime":"2025-12-10T12:57:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:45 crc kubenswrapper[4921]: E1210 12:57:45.704537 4921 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:45Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:45Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"aa6d129a-c0be-471d-913f-2184d68fb040\\\",\\\"systemUUID\\\":\\\"539c9d38-f260-4af7-b6c3-f4170bf93c3e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:45Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:45 crc kubenswrapper[4921]: I1210 12:57:45.708072 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:45 crc kubenswrapper[4921]: I1210 12:57:45.708113 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 10 12:57:45 crc kubenswrapper[4921]: I1210 12:57:45.708129 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:45 crc kubenswrapper[4921]: I1210 12:57:45.708152 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:45 crc kubenswrapper[4921]: I1210 12:57:45.708168 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:45Z","lastTransitionTime":"2025-12-10T12:57:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:45 crc kubenswrapper[4921]: E1210 12:57:45.725479 4921 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:45Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:45Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"aa6d129a-c0be-471d-913f-2184d68fb040\\\",\\\"systemUUID\\\":\\\"539c9d38-f260-4af7-b6c3-f4170bf93c3e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:45Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:45 crc kubenswrapper[4921]: E1210 12:57:45.725708 4921 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 10 12:57:45 crc kubenswrapper[4921]: I1210 12:57:45.727255 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 10 12:57:45 crc kubenswrapper[4921]: I1210 12:57:45.727283 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:45 crc kubenswrapper[4921]: I1210 12:57:45.727294 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:45 crc kubenswrapper[4921]: I1210 12:57:45.727311 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:45 crc kubenswrapper[4921]: I1210 12:57:45.727322 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:45Z","lastTransitionTime":"2025-12-10T12:57:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:45 crc kubenswrapper[4921]: I1210 12:57:45.829725 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:45 crc kubenswrapper[4921]: I1210 12:57:45.829767 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:45 crc kubenswrapper[4921]: I1210 12:57:45.829779 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:45 crc kubenswrapper[4921]: I1210 12:57:45.829799 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:45 crc kubenswrapper[4921]: I1210 12:57:45.829813 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:45Z","lastTransitionTime":"2025-12-10T12:57:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:45 crc kubenswrapper[4921]: I1210 12:57:45.931656 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:45 crc kubenswrapper[4921]: I1210 12:57:45.931732 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:45 crc kubenswrapper[4921]: I1210 12:57:45.931767 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:45 crc kubenswrapper[4921]: I1210 12:57:45.931797 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:45 crc kubenswrapper[4921]: I1210 12:57:45.931819 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:45Z","lastTransitionTime":"2025-12-10T12:57:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:46 crc kubenswrapper[4921]: I1210 12:57:46.033948 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:46 crc kubenswrapper[4921]: I1210 12:57:46.034001 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:46 crc kubenswrapper[4921]: I1210 12:57:46.034016 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:46 crc kubenswrapper[4921]: I1210 12:57:46.034037 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:46 crc kubenswrapper[4921]: I1210 12:57:46.034049 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:46Z","lastTransitionTime":"2025-12-10T12:57:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:46 crc kubenswrapper[4921]: I1210 12:57:46.135843 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:46 crc kubenswrapper[4921]: I1210 12:57:46.135873 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:46 crc kubenswrapper[4921]: I1210 12:57:46.135882 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:46 crc kubenswrapper[4921]: I1210 12:57:46.135896 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:46 crc kubenswrapper[4921]: I1210 12:57:46.135905 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:46Z","lastTransitionTime":"2025-12-10T12:57:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:46 crc kubenswrapper[4921]: I1210 12:57:46.192448 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j2nnf" Dec 10 12:57:46 crc kubenswrapper[4921]: I1210 12:57:46.192577 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 12:57:46 crc kubenswrapper[4921]: E1210 12:57:46.192621 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j2nnf" podUID="9cc656f0-ce36-474b-9fa3-1ce9f43675a4" Dec 10 12:57:46 crc kubenswrapper[4921]: I1210 12:57:46.192664 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 12:57:46 crc kubenswrapper[4921]: I1210 12:57:46.192773 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 12:57:46 crc kubenswrapper[4921]: E1210 12:57:46.192805 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 12:57:46 crc kubenswrapper[4921]: E1210 12:57:46.192912 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 12:57:46 crc kubenswrapper[4921]: E1210 12:57:46.192987 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 12:57:46 crc kubenswrapper[4921]: I1210 12:57:46.239002 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:46 crc kubenswrapper[4921]: I1210 12:57:46.239047 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:46 crc kubenswrapper[4921]: I1210 12:57:46.239057 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:46 crc kubenswrapper[4921]: I1210 12:57:46.239073 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:46 crc kubenswrapper[4921]: I1210 12:57:46.239082 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:46Z","lastTransitionTime":"2025-12-10T12:57:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:46 crc kubenswrapper[4921]: I1210 12:57:46.342020 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:46 crc kubenswrapper[4921]: I1210 12:57:46.342079 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:46 crc kubenswrapper[4921]: I1210 12:57:46.342092 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:46 crc kubenswrapper[4921]: I1210 12:57:46.342112 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:46 crc kubenswrapper[4921]: I1210 12:57:46.342126 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:46Z","lastTransitionTime":"2025-12-10T12:57:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:46 crc kubenswrapper[4921]: I1210 12:57:46.444652 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:46 crc kubenswrapper[4921]: I1210 12:57:46.444699 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:46 crc kubenswrapper[4921]: I1210 12:57:46.444708 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:46 crc kubenswrapper[4921]: I1210 12:57:46.444725 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:46 crc kubenswrapper[4921]: I1210 12:57:46.444739 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:46Z","lastTransitionTime":"2025-12-10T12:57:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:46 crc kubenswrapper[4921]: I1210 12:57:46.547682 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:46 crc kubenswrapper[4921]: I1210 12:57:46.547727 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:46 crc kubenswrapper[4921]: I1210 12:57:46.547742 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:46 crc kubenswrapper[4921]: I1210 12:57:46.547760 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:46 crc kubenswrapper[4921]: I1210 12:57:46.547773 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:46Z","lastTransitionTime":"2025-12-10T12:57:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:46 crc kubenswrapper[4921]: I1210 12:57:46.650842 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:46 crc kubenswrapper[4921]: I1210 12:57:46.650892 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:46 crc kubenswrapper[4921]: I1210 12:57:46.650903 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:46 crc kubenswrapper[4921]: I1210 12:57:46.650920 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:46 crc kubenswrapper[4921]: I1210 12:57:46.650931 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:46Z","lastTransitionTime":"2025-12-10T12:57:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:46 crc kubenswrapper[4921]: I1210 12:57:46.753672 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:46 crc kubenswrapper[4921]: I1210 12:57:46.753733 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:46 crc kubenswrapper[4921]: I1210 12:57:46.753746 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:46 crc kubenswrapper[4921]: I1210 12:57:46.753764 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:46 crc kubenswrapper[4921]: I1210 12:57:46.753777 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:46Z","lastTransitionTime":"2025-12-10T12:57:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:46 crc kubenswrapper[4921]: I1210 12:57:46.855765 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:46 crc kubenswrapper[4921]: I1210 12:57:46.855848 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:46 crc kubenswrapper[4921]: I1210 12:57:46.855871 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:46 crc kubenswrapper[4921]: I1210 12:57:46.855888 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:46 crc kubenswrapper[4921]: I1210 12:57:46.855900 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:46Z","lastTransitionTime":"2025-12-10T12:57:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:46 crc kubenswrapper[4921]: I1210 12:57:46.958866 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:46 crc kubenswrapper[4921]: I1210 12:57:46.958904 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:46 crc kubenswrapper[4921]: I1210 12:57:46.958912 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:46 crc kubenswrapper[4921]: I1210 12:57:46.958928 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:46 crc kubenswrapper[4921]: I1210 12:57:46.958938 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:46Z","lastTransitionTime":"2025-12-10T12:57:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:47 crc kubenswrapper[4921]: I1210 12:57:47.062048 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:47 crc kubenswrapper[4921]: I1210 12:57:47.062107 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:47 crc kubenswrapper[4921]: I1210 12:57:47.062167 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:47 crc kubenswrapper[4921]: I1210 12:57:47.062224 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:47 crc kubenswrapper[4921]: I1210 12:57:47.062240 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:47Z","lastTransitionTime":"2025-12-10T12:57:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:47 crc kubenswrapper[4921]: I1210 12:57:47.165708 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:47 crc kubenswrapper[4921]: I1210 12:57:47.165763 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:47 crc kubenswrapper[4921]: I1210 12:57:47.165778 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:47 crc kubenswrapper[4921]: I1210 12:57:47.165799 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:47 crc kubenswrapper[4921]: I1210 12:57:47.165816 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:47Z","lastTransitionTime":"2025-12-10T12:57:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:47 crc kubenswrapper[4921]: I1210 12:57:47.268226 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:47 crc kubenswrapper[4921]: I1210 12:57:47.268279 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:47 crc kubenswrapper[4921]: I1210 12:57:47.268291 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:47 crc kubenswrapper[4921]: I1210 12:57:47.268311 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:47 crc kubenswrapper[4921]: I1210 12:57:47.268321 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:47Z","lastTransitionTime":"2025-12-10T12:57:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:47 crc kubenswrapper[4921]: I1210 12:57:47.371152 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:47 crc kubenswrapper[4921]: I1210 12:57:47.371216 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:47 crc kubenswrapper[4921]: I1210 12:57:47.371228 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:47 crc kubenswrapper[4921]: I1210 12:57:47.371246 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:47 crc kubenswrapper[4921]: I1210 12:57:47.371258 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:47Z","lastTransitionTime":"2025-12-10T12:57:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:47 crc kubenswrapper[4921]: I1210 12:57:47.474834 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:47 crc kubenswrapper[4921]: I1210 12:57:47.474888 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:47 crc kubenswrapper[4921]: I1210 12:57:47.474899 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:47 crc kubenswrapper[4921]: I1210 12:57:47.474918 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:47 crc kubenswrapper[4921]: I1210 12:57:47.474930 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:47Z","lastTransitionTime":"2025-12-10T12:57:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:47 crc kubenswrapper[4921]: I1210 12:57:47.577246 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:47 crc kubenswrapper[4921]: I1210 12:57:47.577280 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:47 crc kubenswrapper[4921]: I1210 12:57:47.577288 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:47 crc kubenswrapper[4921]: I1210 12:57:47.577302 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:47 crc kubenswrapper[4921]: I1210 12:57:47.577315 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:47Z","lastTransitionTime":"2025-12-10T12:57:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:47 crc kubenswrapper[4921]: I1210 12:57:47.679622 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:47 crc kubenswrapper[4921]: I1210 12:57:47.679661 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:47 crc kubenswrapper[4921]: I1210 12:57:47.679669 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:47 crc kubenswrapper[4921]: I1210 12:57:47.679686 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:47 crc kubenswrapper[4921]: I1210 12:57:47.679697 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:47Z","lastTransitionTime":"2025-12-10T12:57:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:47 crc kubenswrapper[4921]: I1210 12:57:47.781845 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:47 crc kubenswrapper[4921]: I1210 12:57:47.781884 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:47 crc kubenswrapper[4921]: I1210 12:57:47.781896 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:47 crc kubenswrapper[4921]: I1210 12:57:47.781911 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:47 crc kubenswrapper[4921]: I1210 12:57:47.781921 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:47Z","lastTransitionTime":"2025-12-10T12:57:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:47 crc kubenswrapper[4921]: I1210 12:57:47.884553 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:47 crc kubenswrapper[4921]: I1210 12:57:47.884611 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:47 crc kubenswrapper[4921]: I1210 12:57:47.884624 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:47 crc kubenswrapper[4921]: I1210 12:57:47.884646 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:47 crc kubenswrapper[4921]: I1210 12:57:47.884660 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:47Z","lastTransitionTime":"2025-12-10T12:57:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:47 crc kubenswrapper[4921]: I1210 12:57:47.987034 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:47 crc kubenswrapper[4921]: I1210 12:57:47.987089 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:47 crc kubenswrapper[4921]: I1210 12:57:47.987098 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:47 crc kubenswrapper[4921]: I1210 12:57:47.987116 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:47 crc kubenswrapper[4921]: I1210 12:57:47.987127 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:47Z","lastTransitionTime":"2025-12-10T12:57:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:48 crc kubenswrapper[4921]: I1210 12:57:48.090166 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:48 crc kubenswrapper[4921]: I1210 12:57:48.090210 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:48 crc kubenswrapper[4921]: I1210 12:57:48.090220 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:48 crc kubenswrapper[4921]: I1210 12:57:48.090236 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:48 crc kubenswrapper[4921]: I1210 12:57:48.090248 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:48Z","lastTransitionTime":"2025-12-10T12:57:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:48 crc kubenswrapper[4921]: I1210 12:57:48.192298 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 12:57:48 crc kubenswrapper[4921]: I1210 12:57:48.192376 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 12:57:48 crc kubenswrapper[4921]: I1210 12:57:48.192416 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j2nnf" Dec 10 12:57:48 crc kubenswrapper[4921]: I1210 12:57:48.192298 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 12:57:48 crc kubenswrapper[4921]: E1210 12:57:48.192474 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 12:57:48 crc kubenswrapper[4921]: E1210 12:57:48.192652 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 12:57:48 crc kubenswrapper[4921]: E1210 12:57:48.192581 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 12:57:48 crc kubenswrapper[4921]: E1210 12:57:48.192800 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-j2nnf" podUID="9cc656f0-ce36-474b-9fa3-1ce9f43675a4" Dec 10 12:57:48 crc kubenswrapper[4921]: I1210 12:57:48.192909 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:48 crc kubenswrapper[4921]: I1210 12:57:48.192978 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:48 crc kubenswrapper[4921]: I1210 12:57:48.192996 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:48 crc kubenswrapper[4921]: I1210 12:57:48.193025 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:48 crc kubenswrapper[4921]: I1210 12:57:48.193044 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:48Z","lastTransitionTime":"2025-12-10T12:57:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:48 crc kubenswrapper[4921]: I1210 12:57:48.295528 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:48 crc kubenswrapper[4921]: I1210 12:57:48.295591 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:48 crc kubenswrapper[4921]: I1210 12:57:48.295603 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:48 crc kubenswrapper[4921]: I1210 12:57:48.295623 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:48 crc kubenswrapper[4921]: I1210 12:57:48.295636 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:48Z","lastTransitionTime":"2025-12-10T12:57:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:48 crc kubenswrapper[4921]: I1210 12:57:48.398267 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:48 crc kubenswrapper[4921]: I1210 12:57:48.398306 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:48 crc kubenswrapper[4921]: I1210 12:57:48.398317 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:48 crc kubenswrapper[4921]: I1210 12:57:48.398341 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:48 crc kubenswrapper[4921]: I1210 12:57:48.398353 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:48Z","lastTransitionTime":"2025-12-10T12:57:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:48 crc kubenswrapper[4921]: I1210 12:57:48.500382 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:48 crc kubenswrapper[4921]: I1210 12:57:48.500451 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:48 crc kubenswrapper[4921]: I1210 12:57:48.500460 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:48 crc kubenswrapper[4921]: I1210 12:57:48.500476 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:48 crc kubenswrapper[4921]: I1210 12:57:48.500487 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:48Z","lastTransitionTime":"2025-12-10T12:57:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:48 crc kubenswrapper[4921]: I1210 12:57:48.603071 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:48 crc kubenswrapper[4921]: I1210 12:57:48.603115 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:48 crc kubenswrapper[4921]: I1210 12:57:48.603125 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:48 crc kubenswrapper[4921]: I1210 12:57:48.603142 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:48 crc kubenswrapper[4921]: I1210 12:57:48.603152 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:48Z","lastTransitionTime":"2025-12-10T12:57:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:48 crc kubenswrapper[4921]: I1210 12:57:48.705641 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:48 crc kubenswrapper[4921]: I1210 12:57:48.705684 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:48 crc kubenswrapper[4921]: I1210 12:57:48.705695 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:48 crc kubenswrapper[4921]: I1210 12:57:48.705711 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:48 crc kubenswrapper[4921]: I1210 12:57:48.705722 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:48Z","lastTransitionTime":"2025-12-10T12:57:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:48 crc kubenswrapper[4921]: I1210 12:57:48.808223 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:48 crc kubenswrapper[4921]: I1210 12:57:48.808258 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:48 crc kubenswrapper[4921]: I1210 12:57:48.808268 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:48 crc kubenswrapper[4921]: I1210 12:57:48.808284 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:48 crc kubenswrapper[4921]: I1210 12:57:48.808297 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:48Z","lastTransitionTime":"2025-12-10T12:57:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:48 crc kubenswrapper[4921]: I1210 12:57:48.911195 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:48 crc kubenswrapper[4921]: I1210 12:57:48.911243 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:48 crc kubenswrapper[4921]: I1210 12:57:48.911255 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:48 crc kubenswrapper[4921]: I1210 12:57:48.911275 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:48 crc kubenswrapper[4921]: I1210 12:57:48.911292 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:48Z","lastTransitionTime":"2025-12-10T12:57:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:49 crc kubenswrapper[4921]: I1210 12:57:49.014502 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:49 crc kubenswrapper[4921]: I1210 12:57:49.014550 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:49 crc kubenswrapper[4921]: I1210 12:57:49.014563 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:49 crc kubenswrapper[4921]: I1210 12:57:49.014582 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:49 crc kubenswrapper[4921]: I1210 12:57:49.014596 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:49Z","lastTransitionTime":"2025-12-10T12:57:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:49 crc kubenswrapper[4921]: I1210 12:57:49.116874 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:49 crc kubenswrapper[4921]: I1210 12:57:49.116929 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:49 crc kubenswrapper[4921]: I1210 12:57:49.116938 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:49 crc kubenswrapper[4921]: I1210 12:57:49.116975 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:49 crc kubenswrapper[4921]: I1210 12:57:49.116987 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:49Z","lastTransitionTime":"2025-12-10T12:57:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:49 crc kubenswrapper[4921]: I1210 12:57:49.220028 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:49 crc kubenswrapper[4921]: I1210 12:57:49.220056 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:49 crc kubenswrapper[4921]: I1210 12:57:49.220065 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:49 crc kubenswrapper[4921]: I1210 12:57:49.220080 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:49 crc kubenswrapper[4921]: I1210 12:57:49.220089 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:49Z","lastTransitionTime":"2025-12-10T12:57:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:49 crc kubenswrapper[4921]: I1210 12:57:49.322979 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:49 crc kubenswrapper[4921]: I1210 12:57:49.323033 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:49 crc kubenswrapper[4921]: I1210 12:57:49.323046 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:49 crc kubenswrapper[4921]: I1210 12:57:49.323068 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:49 crc kubenswrapper[4921]: I1210 12:57:49.323081 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:49Z","lastTransitionTime":"2025-12-10T12:57:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:49 crc kubenswrapper[4921]: I1210 12:57:49.427047 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:49 crc kubenswrapper[4921]: I1210 12:57:49.427093 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:49 crc kubenswrapper[4921]: I1210 12:57:49.427367 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:49 crc kubenswrapper[4921]: I1210 12:57:49.427426 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:49 crc kubenswrapper[4921]: I1210 12:57:49.427441 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:49Z","lastTransitionTime":"2025-12-10T12:57:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:49 crc kubenswrapper[4921]: I1210 12:57:49.530719 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:49 crc kubenswrapper[4921]: I1210 12:57:49.530787 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:49 crc kubenswrapper[4921]: I1210 12:57:49.530799 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:49 crc kubenswrapper[4921]: I1210 12:57:49.530815 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:49 crc kubenswrapper[4921]: I1210 12:57:49.530827 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:49Z","lastTransitionTime":"2025-12-10T12:57:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:49 crc kubenswrapper[4921]: I1210 12:57:49.633286 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:49 crc kubenswrapper[4921]: I1210 12:57:49.633341 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:49 crc kubenswrapper[4921]: I1210 12:57:49.633349 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:49 crc kubenswrapper[4921]: I1210 12:57:49.633366 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:49 crc kubenswrapper[4921]: I1210 12:57:49.633375 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:49Z","lastTransitionTime":"2025-12-10T12:57:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:49 crc kubenswrapper[4921]: I1210 12:57:49.736160 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:49 crc kubenswrapper[4921]: I1210 12:57:49.736198 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:49 crc kubenswrapper[4921]: I1210 12:57:49.736210 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:49 crc kubenswrapper[4921]: I1210 12:57:49.736226 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:49 crc kubenswrapper[4921]: I1210 12:57:49.736239 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:49Z","lastTransitionTime":"2025-12-10T12:57:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:49 crc kubenswrapper[4921]: I1210 12:57:49.840085 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:49 crc kubenswrapper[4921]: I1210 12:57:49.840134 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:49 crc kubenswrapper[4921]: I1210 12:57:49.840146 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:49 crc kubenswrapper[4921]: I1210 12:57:49.840165 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:49 crc kubenswrapper[4921]: I1210 12:57:49.840178 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:49Z","lastTransitionTime":"2025-12-10T12:57:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:49 crc kubenswrapper[4921]: I1210 12:57:49.942605 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:49 crc kubenswrapper[4921]: I1210 12:57:49.942661 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:49 crc kubenswrapper[4921]: I1210 12:57:49.942672 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:49 crc kubenswrapper[4921]: I1210 12:57:49.942691 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:49 crc kubenswrapper[4921]: I1210 12:57:49.942702 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:49Z","lastTransitionTime":"2025-12-10T12:57:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:50 crc kubenswrapper[4921]: I1210 12:57:50.044955 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:50 crc kubenswrapper[4921]: I1210 12:57:50.045001 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:50 crc kubenswrapper[4921]: I1210 12:57:50.045010 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:50 crc kubenswrapper[4921]: I1210 12:57:50.045027 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:50 crc kubenswrapper[4921]: I1210 12:57:50.045037 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:50Z","lastTransitionTime":"2025-12-10T12:57:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:50 crc kubenswrapper[4921]: I1210 12:57:50.147610 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:50 crc kubenswrapper[4921]: I1210 12:57:50.147701 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:50 crc kubenswrapper[4921]: I1210 12:57:50.147717 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:50 crc kubenswrapper[4921]: I1210 12:57:50.147736 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:50 crc kubenswrapper[4921]: I1210 12:57:50.147751 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:50Z","lastTransitionTime":"2025-12-10T12:57:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:50 crc kubenswrapper[4921]: I1210 12:57:50.191969 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 12:57:50 crc kubenswrapper[4921]: I1210 12:57:50.192021 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 12:57:50 crc kubenswrapper[4921]: I1210 12:57:50.192052 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j2nnf" Dec 10 12:57:50 crc kubenswrapper[4921]: I1210 12:57:50.192107 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 12:57:50 crc kubenswrapper[4921]: E1210 12:57:50.192119 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 12:57:50 crc kubenswrapper[4921]: E1210 12:57:50.192187 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j2nnf" podUID="9cc656f0-ce36-474b-9fa3-1ce9f43675a4" Dec 10 12:57:50 crc kubenswrapper[4921]: E1210 12:57:50.192276 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 12:57:50 crc kubenswrapper[4921]: E1210 12:57:50.192379 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 12:57:50 crc kubenswrapper[4921]: I1210 12:57:50.250780 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:50 crc kubenswrapper[4921]: I1210 12:57:50.251048 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:50 crc kubenswrapper[4921]: I1210 12:57:50.251114 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:50 crc kubenswrapper[4921]: I1210 12:57:50.251213 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:50 crc kubenswrapper[4921]: I1210 12:57:50.251304 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:50Z","lastTransitionTime":"2025-12-10T12:57:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:50 crc kubenswrapper[4921]: I1210 12:57:50.354553 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:50 crc kubenswrapper[4921]: I1210 12:57:50.354603 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:50 crc kubenswrapper[4921]: I1210 12:57:50.354613 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:50 crc kubenswrapper[4921]: I1210 12:57:50.354629 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:50 crc kubenswrapper[4921]: I1210 12:57:50.354642 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:50Z","lastTransitionTime":"2025-12-10T12:57:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:50 crc kubenswrapper[4921]: I1210 12:57:50.457059 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:50 crc kubenswrapper[4921]: I1210 12:57:50.457096 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:50 crc kubenswrapper[4921]: I1210 12:57:50.457104 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:50 crc kubenswrapper[4921]: I1210 12:57:50.457122 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:50 crc kubenswrapper[4921]: I1210 12:57:50.457134 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:50Z","lastTransitionTime":"2025-12-10T12:57:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:50 crc kubenswrapper[4921]: I1210 12:57:50.559483 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:50 crc kubenswrapper[4921]: I1210 12:57:50.559738 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:50 crc kubenswrapper[4921]: I1210 12:57:50.559840 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:50 crc kubenswrapper[4921]: I1210 12:57:50.559908 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:50 crc kubenswrapper[4921]: I1210 12:57:50.559978 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:50Z","lastTransitionTime":"2025-12-10T12:57:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:50 crc kubenswrapper[4921]: I1210 12:57:50.663095 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:50 crc kubenswrapper[4921]: I1210 12:57:50.663148 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:50 crc kubenswrapper[4921]: I1210 12:57:50.663160 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:50 crc kubenswrapper[4921]: I1210 12:57:50.663176 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:50 crc kubenswrapper[4921]: I1210 12:57:50.663204 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:50Z","lastTransitionTime":"2025-12-10T12:57:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:50 crc kubenswrapper[4921]: I1210 12:57:50.767079 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:50 crc kubenswrapper[4921]: I1210 12:57:50.767144 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:50 crc kubenswrapper[4921]: I1210 12:57:50.767157 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:50 crc kubenswrapper[4921]: I1210 12:57:50.767200 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:50 crc kubenswrapper[4921]: I1210 12:57:50.767217 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:50Z","lastTransitionTime":"2025-12-10T12:57:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:50 crc kubenswrapper[4921]: I1210 12:57:50.870727 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:50 crc kubenswrapper[4921]: I1210 12:57:50.870780 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:50 crc kubenswrapper[4921]: I1210 12:57:50.870792 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:50 crc kubenswrapper[4921]: I1210 12:57:50.870811 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:50 crc kubenswrapper[4921]: I1210 12:57:50.870823 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:50Z","lastTransitionTime":"2025-12-10T12:57:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:50 crc kubenswrapper[4921]: I1210 12:57:50.973603 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:50 crc kubenswrapper[4921]: I1210 12:57:50.973657 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:50 crc kubenswrapper[4921]: I1210 12:57:50.973670 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:50 crc kubenswrapper[4921]: I1210 12:57:50.973693 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:50 crc kubenswrapper[4921]: I1210 12:57:50.973706 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:50Z","lastTransitionTime":"2025-12-10T12:57:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:51 crc kubenswrapper[4921]: I1210 12:57:51.076201 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:51 crc kubenswrapper[4921]: I1210 12:57:51.076241 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:51 crc kubenswrapper[4921]: I1210 12:57:51.076250 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:51 crc kubenswrapper[4921]: I1210 12:57:51.076269 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:51 crc kubenswrapper[4921]: I1210 12:57:51.076279 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:51Z","lastTransitionTime":"2025-12-10T12:57:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:51 crc kubenswrapper[4921]: I1210 12:57:51.179544 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:51 crc kubenswrapper[4921]: I1210 12:57:51.179659 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:51 crc kubenswrapper[4921]: I1210 12:57:51.179676 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:51 crc kubenswrapper[4921]: I1210 12:57:51.179701 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:51 crc kubenswrapper[4921]: I1210 12:57:51.179719 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:51Z","lastTransitionTime":"2025-12-10T12:57:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:51 crc kubenswrapper[4921]: I1210 12:57:51.282863 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:51 crc kubenswrapper[4921]: I1210 12:57:51.282928 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:51 crc kubenswrapper[4921]: I1210 12:57:51.282947 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:51 crc kubenswrapper[4921]: I1210 12:57:51.282974 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:51 crc kubenswrapper[4921]: I1210 12:57:51.282992 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:51Z","lastTransitionTime":"2025-12-10T12:57:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:51 crc kubenswrapper[4921]: I1210 12:57:51.385981 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:51 crc kubenswrapper[4921]: I1210 12:57:51.386053 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:51 crc kubenswrapper[4921]: I1210 12:57:51.386067 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:51 crc kubenswrapper[4921]: I1210 12:57:51.386089 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:51 crc kubenswrapper[4921]: I1210 12:57:51.386101 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:51Z","lastTransitionTime":"2025-12-10T12:57:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:51 crc kubenswrapper[4921]: I1210 12:57:51.488889 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:51 crc kubenswrapper[4921]: I1210 12:57:51.488951 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:51 crc kubenswrapper[4921]: I1210 12:57:51.488965 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:51 crc kubenswrapper[4921]: I1210 12:57:51.488990 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:51 crc kubenswrapper[4921]: I1210 12:57:51.489002 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:51Z","lastTransitionTime":"2025-12-10T12:57:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:51 crc kubenswrapper[4921]: I1210 12:57:51.591668 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:51 crc kubenswrapper[4921]: I1210 12:57:51.591714 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:51 crc kubenswrapper[4921]: I1210 12:57:51.591724 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:51 crc kubenswrapper[4921]: I1210 12:57:51.591741 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:51 crc kubenswrapper[4921]: I1210 12:57:51.591752 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:51Z","lastTransitionTime":"2025-12-10T12:57:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:51 crc kubenswrapper[4921]: I1210 12:57:51.694186 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:51 crc kubenswrapper[4921]: I1210 12:57:51.694240 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:51 crc kubenswrapper[4921]: I1210 12:57:51.694250 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:51 crc kubenswrapper[4921]: I1210 12:57:51.694267 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:51 crc kubenswrapper[4921]: I1210 12:57:51.694278 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:51Z","lastTransitionTime":"2025-12-10T12:57:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:51 crc kubenswrapper[4921]: I1210 12:57:51.796954 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:51 crc kubenswrapper[4921]: I1210 12:57:51.797019 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:51 crc kubenswrapper[4921]: I1210 12:57:51.797030 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:51 crc kubenswrapper[4921]: I1210 12:57:51.797047 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:51 crc kubenswrapper[4921]: I1210 12:57:51.797059 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:51Z","lastTransitionTime":"2025-12-10T12:57:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:51 crc kubenswrapper[4921]: I1210 12:57:51.900663 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:51 crc kubenswrapper[4921]: I1210 12:57:51.900711 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:51 crc kubenswrapper[4921]: I1210 12:57:51.900722 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:51 crc kubenswrapper[4921]: I1210 12:57:51.900740 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:51 crc kubenswrapper[4921]: I1210 12:57:51.900753 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:51Z","lastTransitionTime":"2025-12-10T12:57:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:52 crc kubenswrapper[4921]: I1210 12:57:52.004724 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:52 crc kubenswrapper[4921]: I1210 12:57:52.004775 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:52 crc kubenswrapper[4921]: I1210 12:57:52.004784 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:52 crc kubenswrapper[4921]: I1210 12:57:52.004802 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:52 crc kubenswrapper[4921]: I1210 12:57:52.004816 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:52Z","lastTransitionTime":"2025-12-10T12:57:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:52 crc kubenswrapper[4921]: I1210 12:57:52.107946 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:52 crc kubenswrapper[4921]: I1210 12:57:52.107992 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:52 crc kubenswrapper[4921]: I1210 12:57:52.108003 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:52 crc kubenswrapper[4921]: I1210 12:57:52.108022 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:52 crc kubenswrapper[4921]: I1210 12:57:52.108034 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:52Z","lastTransitionTime":"2025-12-10T12:57:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:52 crc kubenswrapper[4921]: I1210 12:57:52.192778 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j2nnf" Dec 10 12:57:52 crc kubenswrapper[4921]: I1210 12:57:52.192820 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 12:57:52 crc kubenswrapper[4921]: I1210 12:57:52.192849 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 12:57:52 crc kubenswrapper[4921]: E1210 12:57:52.193425 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 12:57:52 crc kubenswrapper[4921]: E1210 12:57:52.193204 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j2nnf" podUID="9cc656f0-ce36-474b-9fa3-1ce9f43675a4" Dec 10 12:57:52 crc kubenswrapper[4921]: I1210 12:57:52.192960 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 12:57:52 crc kubenswrapper[4921]: E1210 12:57:52.193522 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 12:57:52 crc kubenswrapper[4921]: E1210 12:57:52.193553 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 12:57:52 crc kubenswrapper[4921]: I1210 12:57:52.210823 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:52 crc kubenswrapper[4921]: I1210 12:57:52.211126 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:52 crc kubenswrapper[4921]: I1210 12:57:52.211345 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:52 crc kubenswrapper[4921]: I1210 12:57:52.211532 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:52 crc kubenswrapper[4921]: I1210 12:57:52.211738 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:52Z","lastTransitionTime":"2025-12-10T12:57:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:52 crc kubenswrapper[4921]: I1210 12:57:52.314656 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:52 crc kubenswrapper[4921]: I1210 12:57:52.314701 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:52 crc kubenswrapper[4921]: I1210 12:57:52.314711 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:52 crc kubenswrapper[4921]: I1210 12:57:52.314729 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:52 crc kubenswrapper[4921]: I1210 12:57:52.314739 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:52Z","lastTransitionTime":"2025-12-10T12:57:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:52 crc kubenswrapper[4921]: I1210 12:57:52.417113 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:52 crc kubenswrapper[4921]: I1210 12:57:52.417147 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:52 crc kubenswrapper[4921]: I1210 12:57:52.417156 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:52 crc kubenswrapper[4921]: I1210 12:57:52.417171 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:52 crc kubenswrapper[4921]: I1210 12:57:52.417180 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:52Z","lastTransitionTime":"2025-12-10T12:57:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:52 crc kubenswrapper[4921]: I1210 12:57:52.519501 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:52 crc kubenswrapper[4921]: I1210 12:57:52.519531 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:52 crc kubenswrapper[4921]: I1210 12:57:52.519540 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:52 crc kubenswrapper[4921]: I1210 12:57:52.519555 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:52 crc kubenswrapper[4921]: I1210 12:57:52.519565 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:52Z","lastTransitionTime":"2025-12-10T12:57:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:52 crc kubenswrapper[4921]: I1210 12:57:52.622419 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:52 crc kubenswrapper[4921]: I1210 12:57:52.622462 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:52 crc kubenswrapper[4921]: I1210 12:57:52.622471 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:52 crc kubenswrapper[4921]: I1210 12:57:52.622489 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:52 crc kubenswrapper[4921]: I1210 12:57:52.622500 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:52Z","lastTransitionTime":"2025-12-10T12:57:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:52 crc kubenswrapper[4921]: I1210 12:57:52.725547 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:52 crc kubenswrapper[4921]: I1210 12:57:52.725595 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:52 crc kubenswrapper[4921]: I1210 12:57:52.725605 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:52 crc kubenswrapper[4921]: I1210 12:57:52.725622 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:52 crc kubenswrapper[4921]: I1210 12:57:52.725634 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:52Z","lastTransitionTime":"2025-12-10T12:57:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:52 crc kubenswrapper[4921]: I1210 12:57:52.828022 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:52 crc kubenswrapper[4921]: I1210 12:57:52.828078 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:52 crc kubenswrapper[4921]: I1210 12:57:52.828091 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:52 crc kubenswrapper[4921]: I1210 12:57:52.828110 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:52 crc kubenswrapper[4921]: I1210 12:57:52.828122 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:52Z","lastTransitionTime":"2025-12-10T12:57:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:52 crc kubenswrapper[4921]: I1210 12:57:52.930340 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:52 crc kubenswrapper[4921]: I1210 12:57:52.930425 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:52 crc kubenswrapper[4921]: I1210 12:57:52.930445 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:52 crc kubenswrapper[4921]: I1210 12:57:52.930468 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:52 crc kubenswrapper[4921]: I1210 12:57:52.930483 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:52Z","lastTransitionTime":"2025-12-10T12:57:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:53 crc kubenswrapper[4921]: I1210 12:57:53.033485 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:53 crc kubenswrapper[4921]: I1210 12:57:53.033534 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:53 crc kubenswrapper[4921]: I1210 12:57:53.033544 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:53 crc kubenswrapper[4921]: I1210 12:57:53.033565 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:53 crc kubenswrapper[4921]: I1210 12:57:53.033578 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:53Z","lastTransitionTime":"2025-12-10T12:57:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:53 crc kubenswrapper[4921]: I1210 12:57:53.136429 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:53 crc kubenswrapper[4921]: I1210 12:57:53.136480 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:53 crc kubenswrapper[4921]: I1210 12:57:53.136493 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:53 crc kubenswrapper[4921]: I1210 12:57:53.136516 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:53 crc kubenswrapper[4921]: I1210 12:57:53.136527 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:53Z","lastTransitionTime":"2025-12-10T12:57:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:53 crc kubenswrapper[4921]: I1210 12:57:53.212702 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bd245e67c99943297f64701eba8772143dc206caf67849eaf2f9a8e82dab0d26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:53Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:53 crc kubenswrapper[4921]: I1210 12:57:53.225715 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jskgz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ad003cc-9fcc-4fc6-86b9-247b30013c0a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8c0dc3ea5672198c430f12ce59b7f2a66100fe52e0f7b4552deba97144250d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m875h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:11Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jskgz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:53Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:53 crc kubenswrapper[4921]: I1210 12:57:53.238003 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ec9bd81-b3fb-41db-acd3-2aff9c4f1c91\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8db8d5b587ec546f8a084fec36cbe7f89aa6998f4bc6dbd1bb9fd22a35f1384e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://efb9127602fc13526bf57fef51bb7814039a1507cee77693ee2723ffc18620bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d0195b43f37c1b874a0daf78d2a91ec39fd64c275503d9f4ec64b74a0d8e423\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://14f71d57ff5277b39ee8d2960b247b98bd9d7ee9993d0fdcb6338c2386b1bc6f\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://14f71d57ff5277b39ee8d2960b247b98bd9d7ee9993d0fdcb6338c2386b1bc6f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:53Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:53 crc kubenswrapper[4921]: I1210 12:57:53.239144 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:53 crc kubenswrapper[4921]: I1210 12:57:53.239165 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:53 crc kubenswrapper[4921]: I1210 12:57:53.239176 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:53 crc kubenswrapper[4921]: I1210 12:57:53.239192 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:53 crc kubenswrapper[4921]: I1210 12:57:53.239202 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:53Z","lastTransitionTime":"2025-12-10T12:57:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:53 crc kubenswrapper[4921]: I1210 12:57:53.250338 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2789e9cd1bca4abecf0939aad4a5f63bdc250a525ad3664bc2440e8b0b7a834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:53Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:53 crc kubenswrapper[4921]: I1210 12:57:53.261012 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zmks6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f2626c5-78df-45d2-8970-c4f99790a0fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d43ebe41a779225842dfa1c4d3be01575113b67ada9be07f553df1514e9dcf85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ft9kj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zmks6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:53Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:53 crc kubenswrapper[4921]: I1210 12:57:53.280020 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pqlx4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://480da3b2621712c4562f9423dc98fdbf17a9dc45365f129777611bc7e934c709\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lhs2m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pqlx4\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:53Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:53 crc kubenswrapper[4921]: I1210 12:57:53.301881 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50684108-04fc-405c-82be-d21d16cd650b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8fd269a96475df9dccf2f7bd0ffae831f397f49232f5c22df67903b9b8b8161e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a88b1b9101bc4ab339d394df337e4e11ec8af98b44b621bcb84eed1a0fba3d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://933c0c81aa0aa2d676a6e404f883a7c81240ef7b07a2e794878c85994d0eb88f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f8f888f214898cb28563da7a77267781622df1f2231c27d1fbdee617ada1ec2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27956424405bdf6223a96b8fd91b5152276a1501c3de2e07dfafc8b3329a6063\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59a0f
3962237d723e5aa9044de1ddce3673ae1fb4c9e5e0478cd41daa661f6d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://551dc5f4c39c06d2143805320061efc95d84e870eeecf23b3a64d829653810ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://551dc5f4c39c06d2143805320061efc95d84e870eeecf23b3a64d829653810ed\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-10T12:57:36Z\\\",\\\"message\\\":\\\"1210 12:57:35.655418 6472 ovn.go:134] Ensuring zone local for Pod openshift-ovn-kubernetes/ovnkube-node-m7n89 in node crc\\\\nI1210 12:57:35.655423 6472 obj_retry.go:386] Retry successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-node-m7n89 after 0 failed attempt(s)\\\\nI1210 12:57:35.655419 6472 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-console/console\\\\\\\"}\\\\nI1210 12:57:35.655437 6472 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1210 12:57:35.655454 6472 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-api/cluster-autoscaler-operator\\\\\\\"}\\\\nI1210 12:57:35.655469 6472 services_controller.go:360] Finished syncing service cluster-autoscaler-operator on namespace openshift-machine-api for network=default : 3.746055ms\\\\nI1210 12:57:35.655482 6472 services_controller.go:356] Processing sync for service openshift-apiserver-operator/metrics for network=default\\\\nF1210 12:57:35.655502 6472 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:34Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-m7n89_openshift-ovn-kubernetes(50684108-04fc-405c-82be-d21d16cd650b)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd0025f5be6e68aba73c349dd732281dead920b7d8c2d307b4a67cfdafb99119\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34932b230bb26e6c4b1bdf433827ce608df8658f6fb76140a4f0ac680dc1d43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://34932b230bb26e6c4b1bdf433827ce608df8658f6fb76140a4f0ac680dc1d43f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-m7n89\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:53Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:53 crc kubenswrapper[4921]: I1210 12:57:53.313789 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wwrv2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6b3380f-1dd4-45de-9c44-eaa37f965801\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://18de9b421542cbc73b0797f1d6e6e6752b88c3f802e5f2fd16d303de041ac72a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqtvp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49ddf0d56e11ffafc30ec8b0065dd6ef3c3decdbf696e169013572c830f6557c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqtvp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:21Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wwrv2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:53Z is after 2025-08-24T17:21:41Z" Dec 10 
12:57:53 crc kubenswrapper[4921]: I1210 12:57:53.324531 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-j2nnf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9cc656f0-ce36-474b-9fa3-1ce9f43675a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9vnm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9vnm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:22Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-j2nnf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:53Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:53 crc kubenswrapper[4921]: I1210 12:57:53.343472 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:53 crc kubenswrapper[4921]: I1210 12:57:53.343525 4921 kubelet_node_status.go:724] "Recording event message 
for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:53 crc kubenswrapper[4921]: I1210 12:57:53.343537 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:53 crc kubenswrapper[4921]: I1210 12:57:53.343556 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:53 crc kubenswrapper[4921]: I1210 12:57:53.343567 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:53Z","lastTransitionTime":"2025-12-10T12:57:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:53 crc kubenswrapper[4921]: I1210 12:57:53.343906 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"371fafdc-aa16-4608-aaa2-e419c4ddbc18\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b9a190a657ca03f3fb08626b7af512164ff131b1783b903a02005a111a7036c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57991b0cb6fd4b37082ff5d4eecc6227d77f241e9a983cd3e0eb9db5b485865f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernet
es/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c24d974446ee70bf587bf3969542cda98f062a9cc78b6af73005d9b8d0a6ee02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5a3f231014293fc0412e577cf9840f62f8db869ea4f0f8bef1bfc5112b38cf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://17a6158acd097054719316d2ad29dc036546d3951bb1e8dd010618f9155270a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://534968b5f5d9e7b3063c91a3e0b68ba04d83e2cb65ab688b23d284adc6852155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://534968b5f5d9e7b3063c91a3e0b68ba04d83e2cb65ab688b23d284adc6852155\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0acb3ca5fa3945c89412f466b00193354c94ce56dbba608c104d3baf555a2c3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0acb3ca5fa3945c89412f466b00193354c94ce56dbba608c104d3baf555a2c3a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b7d1b714acf0f278cc0310204225d417266a241f1ea827dc625f7b89a7d0ebac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b7d1b714acf0f278cc0310204225d417266a241f1ea827dc625f7b89a7d0ebac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:53Z is after 2025-08-24T17:21:41Z"
Dec 10 12:57:53 crc kubenswrapper[4921]: I1210 12:57:53.355475 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02726135-3050-46a1-a3ab-b2ce46cdb75d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12292f0529bcf32fb33e5accfbd0dfd7d53e377a9ee2046d4ca6efc78fe1c31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a4716beddbcd24e8418830aa5494cffffc21272e45e30bd15cfe58bfc07c543\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f66fe2144cde40619405c04d7d83cbcc2e78503401df428502abad1682d4cb7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4244835c7f038a7c1bf4820de49854350a23fac13c5a252a1553f6508594f10e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:53Z is after 2025-08-24T17:21:41Z"
Dec 10 12:57:53 crc kubenswrapper[4921]: I1210 12:57:53.367833 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:53Z is after 2025-08-24T17:21:41Z"
Dec 10 12:57:53 crc kubenswrapper[4921]: I1210 12:57:53.380644 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://307b845aae3352df08e2f9fd394f4110a37b2a21650593ebb584c5bf37d01397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3be8a498516e12174c8b5612669fd69deef610c01ed9884a5228cd436bbae3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:53Z is after 2025-08-24T17:21:41Z"
Dec 10 12:57:53 crc kubenswrapper[4921]: I1210 12:57:53.395892 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-86bpd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"709b4982-f2e6-4692-ab1a-c1d5b7d507ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c842f48ca574d23a086e1b248c17102895f4f45897ac87ddcc1f98f170a22bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34d9e720fab0818e4cdf1e2a4da042a5648c7c396fedf17b395ad07ececd5c9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://34d9e720fab0818e4cdf1e2a4da042a5648c7c396fedf17b395ad07ececd5c9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04723fc3840c9d632dae527a5afa04fc7eea858426056da3dfe8e72186198ab1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://04723fc3840c9d632dae527a5afa04fc7eea858426056da3dfe8e72186198ab1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://adf25ba213f519cad3c21233c0f3d2a383d978543da8ea1db41bb60dd29f9f3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://adf25ba213f519cad3c21233c0f3d2a383d978543da8ea1db41bb60dd29f9f3e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://baa63608618bc4f059414317df70f14a33321d5aed291adc02a9daac92cf5428\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baa63608618bc4f059414317df70f14a33321d5aed291adc02a9daac92cf5428\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e54a218f367591b87841a39399d5889344b8b92fcc70d77105a0191d3dba37c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e54a218f367591b87841a39399d5889344b8b92fcc70d77105a0191d3dba37c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d90adbff8edcd85eebe4858e412769dff7a05b05bbe7fc533906b55e6ee415e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d90adbff8edcd85eebe4858e412769dff7a05b05bbe7fc533906b55e6ee415e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-86bpd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:53Z is after 2025-08-24T17:21:41Z"
Dec 10 12:57:53 crc kubenswrapper[4921]: I1210 12:57:53.411462 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f57208b0-80bc-4c1b-bbab-9d2f858972f6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0534394a39803e8a7555e29d0770b5ac7f9197a5f0e03bec4c5460d77fffdd14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6eaca0cb438e61f0856ed7dc64256ccd02aee8dac014d1f5e9cd8aa180c736fb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://692a4c4828dc74b1bfb948f58fab96ee6674030cb9009c72f30f9eae482eb682\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f534d6390920d177e185001b28f7ece42d82a0da922b4aaf174c271dbe975c50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b39874b20cdccc7903753342421a1f7e13b7e99a2cb699a7c0e44226aebd4f4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"message\\\":\\\"et denominator for mutating requests\\\\\\\" limit=200\\\\nI1210 12:57:01.294872 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1210 12:57:01.294893 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1210 12:57:01.294918 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 12:57:01.294926 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 12:57:01.294932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1210 12:57:01.294934 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1210 12:57:01.294938 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1210 12:57:01.294941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1210 12:57:01.301734 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2446357718/tls.crt::/tmp/serving-cert-2446357718/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1765371405\\\\\\\\\\\\\\\" (2025-12-10 12:56:44 +0000 UTC to 2026-01-09 12:56:45 +0000 UTC (now=2025-12-10 12:57:01.30169166 +0000 UTC))\\\\\\\"\\\\nI1210 12:57:01.301889 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1765371416\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1765371416\\\\\\\\\\\\\\\" (2025-12-10 11:56:55 +0000 UTC to 2026-12-10 11:56:55 +0000 UTC (now=2025-12-10 12:57:01.301865574 +0000 UTC))\\\\\\\"\\\\nI1210 12:57:01.301907 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1210 12:57:01.301934 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nF1210 12:57:01.302850 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e5afbcb1ea81c3f9ec4152ef614a3f07ba1ded75c774c467e968f9c3ee72e33\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bee74fc4c681cc10c5a460c807659272e393e19173109e82ef65371c5b363ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bee74fc4c681cc10c5a460c807659272e393e19173109e82ef65371c5b363ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:53Z is after 2025-08-24T17:21:41Z"
Dec 10 12:57:53 crc kubenswrapper[4921]: I1210 12:57:53.423521 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:53Z is after 2025-08-24T17:21:41Z"
Dec 10 12:57:53 crc kubenswrapper[4921]: I1210 12:57:53.433856 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:53Z is after 2025-08-24T17:21:41Z"
Dec 10 12:57:53 crc kubenswrapper[4921]: I1210 12:57:53.442740 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"354355f7-6630-49a8-bdc5-5e875feecb7f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22c45fd7d4d0bb91e995e76a0d813660f9b488a4765e3a21eab2485e1ff03ff3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dbm9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27975eaa70887a1e6ec3bc21ce170bbe5dfe5a05172264be8c8bd343aea02998\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dbm9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-vn2n6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:53Z is after 2025-08-24T17:21:41Z"
Dec 10 12:57:53 crc kubenswrapper[4921]: I1210 12:57:53.446786 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 12:57:53 crc kubenswrapper[4921]: I1210 12:57:53.446818 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 12:57:53 crc kubenswrapper[4921]: I1210 12:57:53.447012 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 12:57:53 crc kubenswrapper[4921]: I1210 12:57:53.447030 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 12:57:53 crc kubenswrapper[4921]: I1210 12:57:53.447043 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:53Z","lastTransitionTime":"2025-12-10T12:57:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 12:57:53 crc kubenswrapper[4921]: I1210 12:57:53.550157 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 12:57:53 crc kubenswrapper[4921]: I1210 12:57:53.550204 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 12:57:53 crc kubenswrapper[4921]: I1210 12:57:53.550214 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 12:57:53 crc kubenswrapper[4921]: I1210 12:57:53.550233 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 12:57:53 crc kubenswrapper[4921]: I1210 12:57:53.550247 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:53Z","lastTransitionTime":"2025-12-10T12:57:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 12:57:53 crc kubenswrapper[4921]: I1210 12:57:53.653186 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 12:57:53 crc kubenswrapper[4921]: I1210 12:57:53.653611 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 12:57:53 crc kubenswrapper[4921]: I1210 12:57:53.653762 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 12:57:53 crc kubenswrapper[4921]: I1210 12:57:53.653908 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 12:57:53 crc kubenswrapper[4921]: I1210 12:57:53.654043 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:53Z","lastTransitionTime":"2025-12-10T12:57:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 12:57:53 crc kubenswrapper[4921]: I1210 12:57:53.756964 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 12:57:53 crc kubenswrapper[4921]: I1210 12:57:53.757021 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 12:57:53 crc kubenswrapper[4921]: I1210 12:57:53.757032 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 12:57:53 crc kubenswrapper[4921]: I1210 12:57:53.757054 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 12:57:53 crc kubenswrapper[4921]: I1210 12:57:53.757068 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:53Z","lastTransitionTime":"2025-12-10T12:57:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 12:57:53 crc kubenswrapper[4921]: I1210 12:57:53.859666 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 12:57:53 crc kubenswrapper[4921]: I1210 12:57:53.860331 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 12:57:53 crc kubenswrapper[4921]: I1210 12:57:53.860445 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 12:57:53 crc kubenswrapper[4921]: I1210 12:57:53.860542 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 12:57:53 crc kubenswrapper[4921]: I1210 12:57:53.860648 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:53Z","lastTransitionTime":"2025-12-10T12:57:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 12:57:53 crc kubenswrapper[4921]: I1210 12:57:53.963407 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 12:57:53 crc kubenswrapper[4921]: I1210 12:57:53.963468 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 12:57:53 crc kubenswrapper[4921]: I1210 12:57:53.963481 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 12:57:53 crc kubenswrapper[4921]: I1210 12:57:53.963511 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 12:57:53 crc kubenswrapper[4921]: I1210 12:57:53.963528 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:53Z","lastTransitionTime":"2025-12-10T12:57:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 12:57:54 crc kubenswrapper[4921]: I1210 12:57:54.066205 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 12:57:54 crc kubenswrapper[4921]: I1210 12:57:54.066251 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 12:57:54 crc kubenswrapper[4921]: I1210 12:57:54.066260 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 12:57:54 crc kubenswrapper[4921]: I1210 12:57:54.066282 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 12:57:54 crc kubenswrapper[4921]: I1210 12:57:54.066295 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:54Z","lastTransitionTime":"2025-12-10T12:57:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 12:57:54 crc kubenswrapper[4921]: I1210 12:57:54.171852 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 12:57:54 crc kubenswrapper[4921]: I1210 12:57:54.171898 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 12:57:54 crc kubenswrapper[4921]: I1210 12:57:54.171910 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 12:57:54 crc kubenswrapper[4921]: I1210 12:57:54.171933 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 12:57:54 crc kubenswrapper[4921]: I1210 12:57:54.171950 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:54Z","lastTransitionTime":"2025-12-10T12:57:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 12:57:54 crc kubenswrapper[4921]: I1210 12:57:54.191926 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 10 12:57:54 crc kubenswrapper[4921]: I1210 12:57:54.191927 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j2nnf"
Dec 10 12:57:54 crc kubenswrapper[4921]: E1210 12:57:54.192127 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 10 12:57:54 crc kubenswrapper[4921]: I1210 12:57:54.191982 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 10 12:57:54 crc kubenswrapper[4921]: I1210 12:57:54.191954 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 10 12:57:54 crc kubenswrapper[4921]: E1210 12:57:54.192252 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j2nnf" podUID="9cc656f0-ce36-474b-9fa3-1ce9f43675a4"
Dec 10 12:57:54 crc kubenswrapper[4921]: E1210 12:57:54.192382 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 10 12:57:54 crc kubenswrapper[4921]: E1210 12:57:54.192529 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 10 12:57:54 crc kubenswrapper[4921]: I1210 12:57:54.193683 4921 scope.go:117] "RemoveContainer" containerID="551dc5f4c39c06d2143805320061efc95d84e870eeecf23b3a64d829653810ed"
Dec 10 12:57:54 crc kubenswrapper[4921]: E1210 12:57:54.193950 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-m7n89_openshift-ovn-kubernetes(50684108-04fc-405c-82be-d21d16cd650b)\"" pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" podUID="50684108-04fc-405c-82be-d21d16cd650b"
Dec 10 12:57:54 crc kubenswrapper[4921]: I1210 12:57:54.268040 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9cc656f0-ce36-474b-9fa3-1ce9f43675a4-metrics-certs\") pod \"network-metrics-daemon-j2nnf\" (UID: \"9cc656f0-ce36-474b-9fa3-1ce9f43675a4\") " pod="openshift-multus/network-metrics-daemon-j2nnf"
Dec 10 12:57:54 crc kubenswrapper[4921]: E1210 12:57:54.268264 4921 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Dec 10 12:57:54 crc kubenswrapper[4921]: E1210 12:57:54.268792 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9cc656f0-ce36-474b-9fa3-1ce9f43675a4-metrics-certs podName:9cc656f0-ce36-474b-9fa3-1ce9f43675a4 nodeName:}" failed. No retries permitted until 2025-12-10 12:58:26.268768368 +0000 UTC m=+103.484990292 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/9cc656f0-ce36-474b-9fa3-1ce9f43675a4-metrics-certs") pod "network-metrics-daemon-j2nnf" (UID: "9cc656f0-ce36-474b-9fa3-1ce9f43675a4") : object "openshift-multus"/"metrics-daemon-secret" not registered
Dec 10 12:57:54 crc kubenswrapper[4921]: I1210 12:57:54.274441 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 12:57:54 crc kubenswrapper[4921]: I1210 12:57:54.274487 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 12:57:54 crc kubenswrapper[4921]: I1210 12:57:54.274501 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 12:57:54 crc kubenswrapper[4921]: I1210 12:57:54.274520 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 12:57:54 crc kubenswrapper[4921]: I1210 12:57:54.274533 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:54Z","lastTransitionTime":"2025-12-10T12:57:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 12:57:54 crc kubenswrapper[4921]: I1210 12:57:54.376357 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 12:57:54 crc kubenswrapper[4921]: I1210 12:57:54.376413 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 12:57:54 crc kubenswrapper[4921]: I1210 12:57:54.376422 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 12:57:54 crc kubenswrapper[4921]: I1210 12:57:54.376437 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 12:57:54 crc kubenswrapper[4921]: I1210 12:57:54.376447 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:54Z","lastTransitionTime":"2025-12-10T12:57:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 12:57:54 crc kubenswrapper[4921]: I1210 12:57:54.478689 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 12:57:54 crc kubenswrapper[4921]: I1210 12:57:54.478735 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 12:57:54 crc kubenswrapper[4921]: I1210 12:57:54.478745 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 12:57:54 crc kubenswrapper[4921]: I1210 12:57:54.478763 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 12:57:54 crc kubenswrapper[4921]: I1210 12:57:54.478776 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:54Z","lastTransitionTime":"2025-12-10T12:57:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 12:57:54 crc kubenswrapper[4921]: I1210 12:57:54.580565 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 12:57:54 crc kubenswrapper[4921]: I1210 12:57:54.580598 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 12:57:54 crc kubenswrapper[4921]: I1210 12:57:54.580608 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 12:57:54 crc kubenswrapper[4921]: I1210 12:57:54.580626 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 12:57:54 crc kubenswrapper[4921]: I1210 12:57:54.580638 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:54Z","lastTransitionTime":"2025-12-10T12:57:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 12:57:54 crc kubenswrapper[4921]: I1210 12:57:54.682900 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 12:57:54 crc kubenswrapper[4921]: I1210 12:57:54.683012 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 12:57:54 crc kubenswrapper[4921]: I1210 12:57:54.683036 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 12:57:54 crc kubenswrapper[4921]: I1210 12:57:54.683065 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 12:57:54 crc kubenswrapper[4921]: I1210 12:57:54.683084 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:54Z","lastTransitionTime":"2025-12-10T12:57:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 12:57:54 crc kubenswrapper[4921]: I1210 12:57:54.784955 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 12:57:54 crc kubenswrapper[4921]: I1210 12:57:54.785239 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 12:57:54 crc kubenswrapper[4921]: I1210 12:57:54.785334 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 12:57:54 crc kubenswrapper[4921]: I1210 12:57:54.785430 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 12:57:54 crc kubenswrapper[4921]: I1210 12:57:54.785526 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:54Z","lastTransitionTime":"2025-12-10T12:57:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 12:57:54 crc kubenswrapper[4921]: I1210 12:57:54.889373 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 12:57:54 crc kubenswrapper[4921]: I1210 12:57:54.889821 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 12:57:54 crc kubenswrapper[4921]: I1210 12:57:54.889940 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 12:57:54 crc kubenswrapper[4921]: I1210 12:57:54.890145 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 12:57:54 crc kubenswrapper[4921]: I1210 12:57:54.890285 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:54Z","lastTransitionTime":"2025-12-10T12:57:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:54 crc kubenswrapper[4921]: I1210 12:57:54.992737 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:54 crc kubenswrapper[4921]: I1210 12:57:54.992998 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:54 crc kubenswrapper[4921]: I1210 12:57:54.993109 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:54 crc kubenswrapper[4921]: I1210 12:57:54.993217 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:54 crc kubenswrapper[4921]: I1210 12:57:54.993306 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:54Z","lastTransitionTime":"2025-12-10T12:57:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:55 crc kubenswrapper[4921]: I1210 12:57:55.096080 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:55 crc kubenswrapper[4921]: I1210 12:57:55.096130 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:55 crc kubenswrapper[4921]: I1210 12:57:55.096140 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:55 crc kubenswrapper[4921]: I1210 12:57:55.096155 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:55 crc kubenswrapper[4921]: I1210 12:57:55.096165 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:55Z","lastTransitionTime":"2025-12-10T12:57:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:55 crc kubenswrapper[4921]: I1210 12:57:55.197753 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:55 crc kubenswrapper[4921]: I1210 12:57:55.197849 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:55 crc kubenswrapper[4921]: I1210 12:57:55.197894 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:55 crc kubenswrapper[4921]: I1210 12:57:55.197921 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:55 crc kubenswrapper[4921]: I1210 12:57:55.197972 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:55Z","lastTransitionTime":"2025-12-10T12:57:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:55 crc kubenswrapper[4921]: I1210 12:57:55.301581 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:55 crc kubenswrapper[4921]: I1210 12:57:55.301831 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:55 crc kubenswrapper[4921]: I1210 12:57:55.301938 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:55 crc kubenswrapper[4921]: I1210 12:57:55.302025 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:55 crc kubenswrapper[4921]: I1210 12:57:55.302103 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:55Z","lastTransitionTime":"2025-12-10T12:57:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:55 crc kubenswrapper[4921]: I1210 12:57:55.405176 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:55 crc kubenswrapper[4921]: I1210 12:57:55.405518 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:55 crc kubenswrapper[4921]: I1210 12:57:55.405614 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:55 crc kubenswrapper[4921]: I1210 12:57:55.405711 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:55 crc kubenswrapper[4921]: I1210 12:57:55.405796 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:55Z","lastTransitionTime":"2025-12-10T12:57:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:55 crc kubenswrapper[4921]: I1210 12:57:55.509183 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:55 crc kubenswrapper[4921]: I1210 12:57:55.509233 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:55 crc kubenswrapper[4921]: I1210 12:57:55.509244 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:55 crc kubenswrapper[4921]: I1210 12:57:55.509264 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:55 crc kubenswrapper[4921]: I1210 12:57:55.509276 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:55Z","lastTransitionTime":"2025-12-10T12:57:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:55 crc kubenswrapper[4921]: I1210 12:57:55.613179 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:55 crc kubenswrapper[4921]: I1210 12:57:55.613256 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:55 crc kubenswrapper[4921]: I1210 12:57:55.613279 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:55 crc kubenswrapper[4921]: I1210 12:57:55.613310 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:55 crc kubenswrapper[4921]: I1210 12:57:55.613330 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:55Z","lastTransitionTime":"2025-12-10T12:57:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:55 crc kubenswrapper[4921]: I1210 12:57:55.698544 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-pqlx4_78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e/kube-multus/0.log" Dec 10 12:57:55 crc kubenswrapper[4921]: I1210 12:57:55.698599 4921 generic.go:334] "Generic (PLEG): container finished" podID="78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e" containerID="480da3b2621712c4562f9423dc98fdbf17a9dc45365f129777611bc7e934c709" exitCode=1 Dec 10 12:57:55 crc kubenswrapper[4921]: I1210 12:57:55.698636 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-pqlx4" event={"ID":"78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e","Type":"ContainerDied","Data":"480da3b2621712c4562f9423dc98fdbf17a9dc45365f129777611bc7e934c709"} Dec 10 12:57:55 crc kubenswrapper[4921]: I1210 12:57:55.699165 4921 scope.go:117] "RemoveContainer" containerID="480da3b2621712c4562f9423dc98fdbf17a9dc45365f129777611bc7e934c709" Dec 10 12:57:55 crc kubenswrapper[4921]: I1210 12:57:55.716194 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:55 crc kubenswrapper[4921]: I1210 12:57:55.716253 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:55 crc kubenswrapper[4921]: I1210 12:57:55.716262 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:55 crc kubenswrapper[4921]: I1210 12:57:55.716280 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:55 crc kubenswrapper[4921]: I1210 12:57:55.716291 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:55Z","lastTransitionTime":"2025-12-10T12:57:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:55 crc kubenswrapper[4921]: I1210 12:57:55.721577 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://307b845aae3352df08e2f9fd394f4110a37b2a21650593ebb584c5bf37d01397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3be8a498516e12174c8b5612669fd69deef610c01ed9884a5228cd436bbae3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:55Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:55 crc kubenswrapper[4921]: I1210 12:57:55.745478 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-86bpd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"709b4982-f2e6-4692-ab1a-c1d5b7d507ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c842f48ca574d23a086e1b248c17102895f4f45897ac87ddcc1f98f170a22bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34d9e720fab0818e4cdf1e2a4da042a5648c7c396fedf17b395ad07ececd5c9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://34d9e720fab0818e4cdf1e2a4da042a5648c7c396fedf17b395ad07ececd5c9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04723fc3840c9d632dae527a5afa04fc7eea858426056da3dfe8e72186198ab1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://04723fc3840c9d632dae527a5afa04fc7eea858426056da3dfe8e72186198ab1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://adf25ba213f519cad3c21233c0f3d2a383d978543da8ea1db41bb60dd29f9f3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://adf25ba213f519cad3c21233c0f3d2a383d978543da8ea1db41bb60dd29f9f3e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://baa63608618bc4f059414317df70f14a33321d5aed291adc02a9daac92cf5428\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baa63608618bc4f059414317df70f14a33321d5aed291adc02a9daac92cf5428\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e54a218f367591b87841a39399d5889344b8b92fcc70d77105a0191d3dba37c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e54a218f367591b87841a39399d5889344b8b92fcc70d77105a0191d3dba37c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d90adbff8edcd85eebe4858e412769dff7a05b05bbe7fc533906b55e6ee415e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d90adbff8edcd85eebe4858e412769dff7a05b05bbe7fc533906b55e6ee415e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-86bpd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:55Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:55 crc kubenswrapper[4921]: I1210 12:57:55.776808 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"371fafdc-aa16-4608-aaa2-e419c4ddbc18\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b9a190a657ca03f3fb08626b7af512164ff131b1783b903a02005a111a7036c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57991b0cb6fd4b37082ff5d4eecc6227d77f241e9a983cd3e0eb9db5b485865f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c24d974446ee70bf587bf3969542cda98f062a9cc78b6af73005d9b8d0a6ee02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5a3f231014293fc0412e577cf9840f62f8db86
9ea4f0f8bef1bfc5112b38cf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://17a6158acd097054719316d2ad29dc036546d3951bb1e8dd010618f9155270a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://534968b5f5d9e7b3063c91a3e0b68ba04d83e2cb65ab688b23d284adc6852155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://534968b5f5d9e7b3063c91a3e0b68ba04d83e2cb65ab688b23d284adc6852155\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0acb3ca5fa3945c89412f466b00193354c94ce56dbba608c104d3baf555a2c3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0acb3ca5fa3945c89412f466b00193354c94ce56dbba608c104d3baf555a2c3a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b7d1b714acf0f278cc0310204225d417266a241f1ea827dc625f7b89a7d0ebac\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b7d1b714acf0f278cc0310204225d417266a241f1ea827dc625f7b89a7d0ebac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:55Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:55 crc kubenswrapper[4921]: I1210 12:57:55.793212 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02726135-3050-46a1-a3ab-b2ce46cdb75d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12292f0529bcf32fb33e5accfbd0dfd7d53e377a9ee2046d4ca6efc78fe1c31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a4716beddbcd24e8418830aa5494cffffc21272e45e30bd15cfe58bfc07c543\\\",\\\"image\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f66fe2144cde40619405c04d7d83cbcc2e78503401df428502abad1682d4cb7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4244835c7f038a7c1bf4820de49854350a23fac13c5a252a1553f6508594f10e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:55Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:55 crc kubenswrapper[4921]: I1210 12:57:55.806089 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:55Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:55 crc kubenswrapper[4921]: I1210 12:57:55.815697 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:55 crc kubenswrapper[4921]: I1210 12:57:55.815732 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:55 crc kubenswrapper[4921]: I1210 12:57:55.815742 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:55 crc kubenswrapper[4921]: I1210 12:57:55.815761 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:55 crc kubenswrapper[4921]: I1210 12:57:55.815774 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:55Z","lastTransitionTime":"2025-12-10T12:57:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:55 crc kubenswrapper[4921]: I1210 12:57:55.827466 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f57208b0-80bc-4c1b-bbab-9d2f858972f6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0534394a39803e8a7555e29d0770b5ac7f9197a5f0e03bec4c5460d77fffdd14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6eaca0cb438e61f0856ed7dc64256ccd02aee8dac014d1f5e9cd8aa180c736fb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://692a4c4828dc74b1bfb948f58fab96ee6674030cb9009c72f30f9eae482eb682\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f534d6390920d177e185001b28f7ece42d82a0da922b4aaf174c271dbe975c50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b39874b20cdccc7903753342421a1f7e13b7e99a2cb699a7c0e44226aebd4f4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"message\\\":\\\"et denominator for mutating requests\\\\\\\" limit=200\\\\nI1210 12:57:01.294872 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1210 12:57:01.294893 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1210 12:57:01.294918 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 12:57:01.294926 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 12:57:01.294932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1210 12:57:01.294934 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1210 12:57:01.294938 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1210 12:57:01.294941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1210 12:57:01.301734 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2446357718/tls.crt::/tmp/serving-cert-2446357718/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1765371405\\\\\\\\\\\\\\\" (2025-12-10 12:56:44 +0000 UTC to 2026-01-09 12:56:45 +0000 UTC (now=2025-12-10 12:57:01.30169166 +0000 UTC))\\\\\\\"\\\\nI1210 12:57:01.301889 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1765371416\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1765371416\\\\\\\\\\\\\\\" (2025-12-10 11:56:55 +0000 UTC to 2026-12-10 11:56:55 +0000 UTC (now=2025-12-10 12:57:01.301865574 +0000 UTC))\\\\\\\"\\\\nI1210 12:57:01.301907 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1210 12:57:01.301934 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nF1210 12:57:01.302850 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e5afbcb1ea81c3f9ec4152ef614a3f07ba1ded75c774c467e968f9c3ee72e33\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bee74fc4c681cc10c5a460c807659272e393e19173109e82ef65371c5b363ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bee74fc4c681cc10c5a460c807659272e393e19173109e82ef65371c5b363ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:55Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:55 crc kubenswrapper[4921]: E1210 12:57:55.835861 4921 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory 
available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:55Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:55Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\
"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":45063
7738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"aa6d129a-c0be-471d-913f-2184d68fb040\\\",\\\"systemUUID\\\":\\\"539c9d38-f260-4af7-b6c3-f4170bf93c3e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:55Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:55 crc kubenswrapper[4921]: I1210 12:57:55.841776 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:55Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:55 crc kubenswrapper[4921]: I1210 12:57:55.844248 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:55 crc kubenswrapper[4921]: I1210 12:57:55.844305 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:55 crc kubenswrapper[4921]: I1210 12:57:55.844316 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:55 crc kubenswrapper[4921]: I1210 12:57:55.844335 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:55 crc kubenswrapper[4921]: I1210 12:57:55.844346 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:55Z","lastTransitionTime":"2025-12-10T12:57:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:55 crc kubenswrapper[4921]: I1210 12:57:55.856639 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:55Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:55 crc kubenswrapper[4921]: E1210 12:57:55.856645 4921 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory 
available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:55Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:55Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\
"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":45063
7738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"aa6d129a-c0be-471d-913f-2184d68fb040\\\",\\\"systemUUID\\\":\\\"539c9d38-f260-4af7-b6c3-f4170bf93c3e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:55Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:55 crc kubenswrapper[4921]: I1210 12:57:55.860293 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:55 crc kubenswrapper[4921]: I1210 12:57:55.860339 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:55 crc kubenswrapper[4921]: I1210 12:57:55.860356 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:55 crc kubenswrapper[4921]: I1210 12:57:55.860377 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:55 crc kubenswrapper[4921]: I1210 12:57:55.860412 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:55Z","lastTransitionTime":"2025-12-10T12:57:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:55 crc kubenswrapper[4921]: I1210 12:57:55.867078 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"354355f7-6630-49a8-bdc5-5e875feecb7f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22c45fd7d4d0bb91e995e76a0d813660f9b488a4765e3a21eab2485e1ff03ff3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dbm9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27975eaa70887a1e6ec3bc21ce170bbe5dfe5a05172264be8c8bd343aea02998\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dbm9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-vn2n6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:55Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:55 crc kubenswrapper[4921]: E1210 12:57:55.874459 4921 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:55Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:55Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"aa6d129a-c0be-471d-913f-2184d68fb040\\\",\\\"systemUUID\\\":\\\"539c9d38-f260-4af7-b6c3-f4170bf93c3e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:55Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:55 crc kubenswrapper[4921]: I1210 12:57:55.878838 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bd245e67c99943297f64701eba8772143dc206caf67849eaf2f9a8e82dab0d26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:55Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:55 crc kubenswrapper[4921]: I1210 12:57:55.879327 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:55 crc kubenswrapper[4921]: I1210 12:57:55.879354 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:55 crc kubenswrapper[4921]: I1210 12:57:55.879369 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:55 crc kubenswrapper[4921]: I1210 12:57:55.879406 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:55 crc kubenswrapper[4921]: I1210 12:57:55.879418 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:55Z","lastTransitionTime":"2025-12-10T12:57:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:55 crc kubenswrapper[4921]: I1210 12:57:55.890176 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jskgz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ad003cc-9fcc-4fc6-86b9-247b30013c0a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8c0dc3ea5672198c430f12ce59b7f2a66100fe52e0f7b4552deba97144250d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m875h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:11Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jskgz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:55Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:55 crc kubenswrapper[4921]: E1210 12:57:55.890495 4921 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient 
memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:55Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:55Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-r
elease-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\
\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\
":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"aa6d129a-c0be-471d-913f-2184d68fb040\\\",\\\"systemUUID\\\":\\\"539c9d38-f260-4af7-b6c3-f4170bf93c3e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:55Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:55 crc kubenswrapper[4921]: I1210 12:57:55.896111 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:55 crc kubenswrapper[4921]: I1210 12:57:55.896171 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:55 crc kubenswrapper[4921]: I1210 12:57:55.896185 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:55 crc kubenswrapper[4921]: I1210 12:57:55.896205 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:55 crc kubenswrapper[4921]: I1210 12:57:55.896220 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:55Z","lastTransitionTime":"2025-12-10T12:57:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:55 crc kubenswrapper[4921]: I1210 12:57:55.909931 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50684108-04fc-405c-82be-d21d16cd650b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8fd269a96475df9dccf2f7bd0ffae831f397f49232f5c22df67903b9b8b8161e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a88b1b9101bc4ab339d394df337e4e11ec8af98b44b621bcb84eed1a0fba3d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://933c0c81aa0aa2d676a6e404f883a7c81240ef7b07a2e794878c85994d0eb88f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f8f888f214898cb28563da7a77267781622df1f2231c27d1fbdee617ada1ec2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27956424405bdf6223a96b8fd91b5152276a1501c3de2e07dfafc8b3329a6063\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59a0f3962237d723e5aa9044de1ddce3673ae1fb4c9e5e0478cd41daa661f6d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://551dc5f4c39c06d2143805320061efc95d84e870eeecf23b3a64d829653810ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://551dc5f4c39c06d2143805320061efc95d84e870eeecf23b3a64d829653810ed\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-10T12:57:36Z\\\",\\\"message\\\":\\\"1210 12:57:35.655418 6472 ovn.go:134] Ensuring zone local for Pod openshift-ovn-kubernetes/ovnkube-node-m7n89 in node crc\\\\nI1210 12:57:35.655423 6472 obj_retry.go:386] Retry successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-node-m7n89 after 0 failed attempt(s)\\\\nI1210 12:57:35.655419 6472 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-console/console\\\\\\\"}\\\\nI1210 12:57:35.655437 6472 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1210 12:57:35.655454 6472 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-api/cluster-autoscaler-operator\\\\\\\"}\\\\nI1210 12:57:35.655469 6472 services_controller.go:360] Finished syncing service cluster-autoscaler-operator on namespace openshift-machine-api for network=default : 3.746055ms\\\\nI1210 12:57:35.655482 6472 services_controller.go:356] Processing sync for service openshift-apiserver-operator/metrics for network=default\\\\nF1210 12:57:35.655502 6472 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:34Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-m7n89_openshift-ovn-kubernetes(50684108-04fc-405c-82be-d21d16cd650b)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd0025f5be6e68aba73c349dd732281dead920b7d8c2d307b4a67cfdafb99119\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34932b230bb26e6c4b1bdf433827ce608df8658f6fb76140a4f0ac680dc1d43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://34932b230bb26e6c4b1bdf433827ce608df8658f6fb76140a4f0ac680dc1d43f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-m7n89\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:55Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:55 crc kubenswrapper[4921]: E1210 12:57:55.911073 4921 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:55Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:57:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:55Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in 
/etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeByt
es\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-a
rt-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"aa6d129a-c0be-471d-913f-2184d68fb040\\\",\\\"systemUUID\\\":\\\"539c9d38-f260-4af7-b6c3-f4170bf93c3e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:55Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:55 crc kubenswrapper[4921]: E1210 12:57:55.911287 4921 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 10 12:57:55 crc kubenswrapper[4921]: I1210 12:57:55.913741 4921 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:55 crc kubenswrapper[4921]: I1210 12:57:55.913792 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:55 crc kubenswrapper[4921]: I1210 12:57:55.913804 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:55 crc kubenswrapper[4921]: I1210 12:57:55.913822 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:55 crc kubenswrapper[4921]: I1210 12:57:55.913835 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:55Z","lastTransitionTime":"2025-12-10T12:57:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:55 crc kubenswrapper[4921]: I1210 12:57:55.923622 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wwrv2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6b3380f-1dd4-45de-9c44-eaa37f965801\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://18de9b421542cbc73b0797f1d6e6e6752b88c3f802e5f2fd16d303de041ac72a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqtvp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49ddf0d56e11ffafc30ec8b0065dd6ef3c3decdbf696e169013572c830f6557c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d7732
57453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqtvp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:21Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wwrv2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:55Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:55 crc kubenswrapper[4921]: I1210 12:57:55.936485 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-j2nnf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9cc656f0-ce36-474b-9fa3-1ce9f43675a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9vnm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9vnm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:22Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-j2nnf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:55Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:55 crc kubenswrapper[4921]: I1210 12:57:55.947837 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ec9bd81-b3fb-41db-acd3-2aff9c4f1c91\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8db8d5b587ec546f8a084fec36cbe7f89aa6998f4bc6dbd1bb9fd22a35f1384e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://efb9127602fc13526bf57fef51bb7814039a1507cee77693ee2723ffc18620bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d0195b43f37c1b874a0daf78d2a91ec39fd64c275503d9f4ec64b74a0d8e423\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://14f71d57ff5277b39ee8d2960b247b98bd9d7ee9993d0fdcb6338c2386b1bc6f\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://14f71d57ff5277b39ee8d2960b247b98bd9d7ee9993d0fdcb6338c2386b1bc6f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:55Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:55 crc kubenswrapper[4921]: I1210 12:57:55.960067 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2789e9cd1bca4abecf0939aad4a5f63bdc250a525ad3664bc2440e8b0b7a834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:55Z is after 
2025-08-24T17:21:41Z" Dec 10 12:57:55 crc kubenswrapper[4921]: I1210 12:57:55.971657 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zmks6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f2626c5-78df-45d2-8970-c4f99790a0fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d43ebe41a779225842dfa1c4d3be01575113b67ada9be07f553df1514e9dcf85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ft9kj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zmks6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:55Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:55 crc kubenswrapper[4921]: I1210 12:57:55.987216 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pqlx4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:55Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:55Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://480da3b2621712c4562f9423dc98fdbf17a9dc45365f129777611bc7e934c709\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://480da3b2621712c4562f9423dc98fdbf17a9dc45365f129777611bc7e934c709\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-10T12:57:54Z\\\",\\\"message\\\":\\\"2025-12-10T12:57:09+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_18fde2ae-40b8-48ee-8374-0eb68c5a88c2\\\\n2025-12-10T12:57:09+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_18fde2ae-40b8-48ee-8374-0eb68c5a88c2 to /host/opt/cni/bin/\\\\n2025-12-10T12:57:09Z [verbose] multus-daemon started\\\\n2025-12-10T12:57:09Z [verbose] Readiness Indicator file check\\\\n2025-12-10T12:57:54Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lhs2m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pqlx4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:55Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:56 crc kubenswrapper[4921]: I1210 12:57:56.017018 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:56 crc kubenswrapper[4921]: I1210 12:57:56.017081 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:56 crc kubenswrapper[4921]: I1210 12:57:56.017094 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:56 crc kubenswrapper[4921]: I1210 12:57:56.017134 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:56 crc kubenswrapper[4921]: I1210 12:57:56.017149 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:56Z","lastTransitionTime":"2025-12-10T12:57:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:56 crc kubenswrapper[4921]: I1210 12:57:56.119897 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:56 crc kubenswrapper[4921]: I1210 12:57:56.119949 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:56 crc kubenswrapper[4921]: I1210 12:57:56.119958 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:56 crc kubenswrapper[4921]: I1210 12:57:56.119973 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:56 crc kubenswrapper[4921]: I1210 12:57:56.119984 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:56Z","lastTransitionTime":"2025-12-10T12:57:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:56 crc kubenswrapper[4921]: I1210 12:57:56.192649 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 12:57:56 crc kubenswrapper[4921]: I1210 12:57:56.192721 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 12:57:56 crc kubenswrapper[4921]: I1210 12:57:56.192736 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j2nnf" Dec 10 12:57:56 crc kubenswrapper[4921]: I1210 12:57:56.192667 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 12:57:56 crc kubenswrapper[4921]: E1210 12:57:56.192801 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 12:57:56 crc kubenswrapper[4921]: E1210 12:57:56.192876 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 12:57:56 crc kubenswrapper[4921]: E1210 12:57:56.192939 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 12:57:56 crc kubenswrapper[4921]: E1210 12:57:56.193027 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j2nnf" podUID="9cc656f0-ce36-474b-9fa3-1ce9f43675a4" Dec 10 12:57:56 crc kubenswrapper[4921]: I1210 12:57:56.223276 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:56 crc kubenswrapper[4921]: I1210 12:57:56.223318 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:56 crc kubenswrapper[4921]: I1210 12:57:56.223358 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:56 crc kubenswrapper[4921]: I1210 12:57:56.223377 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:56 crc kubenswrapper[4921]: I1210 12:57:56.223788 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:56Z","lastTransitionTime":"2025-12-10T12:57:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:56 crc kubenswrapper[4921]: I1210 12:57:56.326789 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:56 crc kubenswrapper[4921]: I1210 12:57:56.326818 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:56 crc kubenswrapper[4921]: I1210 12:57:56.326826 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:56 crc kubenswrapper[4921]: I1210 12:57:56.326841 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:56 crc kubenswrapper[4921]: I1210 12:57:56.326850 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:56Z","lastTransitionTime":"2025-12-10T12:57:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:56 crc kubenswrapper[4921]: I1210 12:57:56.429641 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:56 crc kubenswrapper[4921]: I1210 12:57:56.429695 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:56 crc kubenswrapper[4921]: I1210 12:57:56.429707 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:56 crc kubenswrapper[4921]: I1210 12:57:56.429727 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:56 crc kubenswrapper[4921]: I1210 12:57:56.429740 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:56Z","lastTransitionTime":"2025-12-10T12:57:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:56 crc kubenswrapper[4921]: I1210 12:57:56.532307 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:56 crc kubenswrapper[4921]: I1210 12:57:56.532345 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:56 crc kubenswrapper[4921]: I1210 12:57:56.532357 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:56 crc kubenswrapper[4921]: I1210 12:57:56.532375 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:56 crc kubenswrapper[4921]: I1210 12:57:56.532407 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:56Z","lastTransitionTime":"2025-12-10T12:57:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:56 crc kubenswrapper[4921]: I1210 12:57:56.634598 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:56 crc kubenswrapper[4921]: I1210 12:57:56.634635 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:56 crc kubenswrapper[4921]: I1210 12:57:56.634645 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:56 crc kubenswrapper[4921]: I1210 12:57:56.634664 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:56 crc kubenswrapper[4921]: I1210 12:57:56.634676 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:56Z","lastTransitionTime":"2025-12-10T12:57:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:56 crc kubenswrapper[4921]: I1210 12:57:56.704827 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-pqlx4_78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e/kube-multus/0.log" Dec 10 12:57:56 crc kubenswrapper[4921]: I1210 12:57:56.704918 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-pqlx4" event={"ID":"78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e","Type":"ContainerStarted","Data":"78a25a5392eaeaa8d5c01232765dce78525cb15225f6d54f198f5ec652979da4"} Dec 10 12:57:56 crc kubenswrapper[4921]: I1210 12:57:56.726044 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"371fafdc-aa16-4608-aaa2-e419c4ddbc18\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b9a190a657ca03f3fb08626b7af512164ff131b1783b903a02005a111a7036c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57991b0cb6fd4b37082ff5d4eecc6227d77f241e9a983cd3e0eb9db5b485865f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c24d974446ee70bf587bf3969542cda98f062a9cc78b6af73005d9b8d0a6ee02\\\",\\
\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5a3f231014293fc0412e577cf9840f62f8db869ea4f0f8bef1bfc5112b38cf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://17a6158acd097054719316d2ad29dc036546d3951bb1e8dd010618f9155270a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://534968b5f5d9e7b3063c91a3e0b68ba04d83e2cb65ab688b23d284adc6852155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://534968b5f5d9e7b3063c91a3e0b68ba04d83e2cb65ab688b23d284adc6852155\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0acb3ca5fa3945c89412f466b00193354c94ce56dbba608c104d3baf555a2c3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0acb3ca5fa3945c89412f466b00193354c94ce56dbba608c104d3baf555a2c3a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b7d1b714acf0f278cc0310204225d417266a241f1ea827dc625f7b89a7d0ebac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b7d1b714acf0f278cc0310204225d417266a241f1ea827dc625f7b89a7d0ebac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:56Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:56 crc kubenswrapper[4921]: I1210 12:57:56.737209 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:56 crc kubenswrapper[4921]: I1210 12:57:56.737232 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:56 crc kubenswrapper[4921]: I1210 12:57:56.737245 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:56 crc kubenswrapper[4921]: I1210 12:57:56.737262 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:56 crc kubenswrapper[4921]: I1210 12:57:56.737274 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:56Z","lastTransitionTime":"2025-12-10T12:57:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:56 crc kubenswrapper[4921]: I1210 12:57:56.745377 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02726135-3050-46a1-a3ab-b2ce46cdb75d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12292f0529bcf32fb33e5accfbd0dfd7d53e377a9ee2046d4ca6efc78fe1c31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a4716beddbcd24e8418830aa5494cffffc21272e45e30bd15cfe58bfc07c543\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f66fe2144cde40619405c04d7d83cbcc2e78503401df428502abad1682d4cb7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4244835c7f038a7c1bf4820de49854350a23fac13c5a252a1553f6508594f10e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:56Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:56 crc kubenswrapper[4921]: I1210 12:57:56.761588 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:56Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:56 crc kubenswrapper[4921]: I1210 12:57:56.775879 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://307b845aae3352df08e2f9fd394f4110a37b2a21650593ebb584c5bf37d01397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3be8a498516e12174c8b5612669fd69deef610c01ed9884a5228cd436bbae3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:56Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:56 crc kubenswrapper[4921]: I1210 12:57:56.790590 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-86bpd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"709b4982-f2e6-4692-ab1a-c1d5b7d507ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c842f48ca574d23a086e1b248c17102895f4f45897ac87ddcc1f98f170a22bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34d9e720fab0818e4cdf1e2a4da042a5648c7c396fedf17b395ad07ececd5c9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://34d9e720fab0818e4cdf1e2a4da042a5648c7c396fedf17b395ad07ececd5c9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"start
edAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04723fc3840c9d632dae527a5afa04fc7eea858426056da3dfe8e72186198ab1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://04723fc3840c9d632dae527a5afa04fc7eea858426056da3dfe8e72186198ab1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://adf25ba213f519cad3c21233c0f3d2a383d978543da8ea1db41bb60dd29f9f3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://adf25ba213f519cad3c21233c0f3d2a383d978543da8ea1db41bb60dd29f9f3e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://baa63608618bc4f059414317df70f14a33321d5aed291adc02a9daac92cf5428\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\"
:\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baa63608618bc4f059414317df70f14a33321d5aed291adc02a9daac92cf5428\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e54a218f367591b87841a39399d5889344b8b92fcc70d77105a0191d3dba37c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e54a218f367591b87841a39399d5889344b8b92fcc70d77105a0191d3dba37c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d90adbff8edcd85eebe4858e412769dff7a05b05bbe7fc533906b55e6ee415e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d90adbff8edcd85eebe4858e412769dff7a05b05bbe7fc533906b55e6ee415e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.1
68.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-86bpd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:56Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:56 crc kubenswrapper[4921]: I1210 12:57:56.805673 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f57208b0-80bc-4c1b-bbab-9d2f858972f6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0534394a39803e8a7555e29d0770b5ac7f9197a5f0e03bec4c5460d77fffdd14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6eaca0cb438e61f0856ed7dc64256ccd02aee8dac014d1f5e9cd8aa180c736fb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://692a4c4828dc74b1bfb948f58fab96ee6674030cb9009c72f30f9eae482eb682\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-clust
er-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f534d6390920d177e185001b28f7ece42d82a0da922b4aaf174c271dbe975c50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b39874b20cdccc7903753342421a1f7e13b7e99a2cb699a7c0e44226aebd4f4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"message\\\":\\\"et denominator for mutating requests\\\\\\\" limit=200\\\\nI1210 12:57:01.294872 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1210 12:57:01.294893 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1210 12:57:01.294918 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 12:57:01.294926 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 12:57:01.294932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1210 12:57:01.294934 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1210 12:57:01.294938 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1210 12:57:01.294941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1210 12:57:01.301734 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2446357718/tls.crt::/tmp/serving-cert-2446357718/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1765371405\\\\\\\\\\\\\\\" (2025-12-10 12:56:44 +0000 UTC to 2026-01-09 12:56:45 +0000 UTC (now=2025-12-10 12:57:01.30169166 +0000 UTC))\\\\\\\"\\\\nI1210 12:57:01.301889 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1765371416\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1765371416\\\\\\\\\\\\\\\" (2025-12-10 11:56:55 +0000 UTC to 2026-12-10 11:56:55 +0000 UTC (now=2025-12-10 12:57:01.301865574 +0000 UTC))\\\\\\\"\\\\nI1210 12:57:01.301907 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1210 12:57:01.301934 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nF1210 12:57:01.302850 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e5afbcb1ea81c3f9ec4152ef614a3f07ba1ded75c774c467e968f9c3ee72e33\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bee74fc4c681cc10c5a460c807659272e393e19173109e82ef65371c5b363ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bee74fc4c681cc10c5a460c807659272e393e19173109e82ef65371c5b363ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:56Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:56 crc kubenswrapper[4921]: I1210 12:57:56.819929 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:56Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:56 crc kubenswrapper[4921]: I1210 12:57:56.832434 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:56Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:56 crc kubenswrapper[4921]: I1210 12:57:56.840954 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:56 crc kubenswrapper[4921]: I1210 12:57:56.841078 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:56 crc kubenswrapper[4921]: I1210 12:57:56.841117 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:56 crc kubenswrapper[4921]: I1210 12:57:56.841139 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:56 crc kubenswrapper[4921]: I1210 12:57:56.841153 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:56Z","lastTransitionTime":"2025-12-10T12:57:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
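
The recurring webhook failure in the records above is a pure time-window check: the TLS client rejects the handshake because the current time (2025-12-10T12:57:56Z) falls after the certificate's NotAfter (2025-08-24T17:21:41Z). A minimal sketch of the same check against a PEM-encoded certificate; the file path is hypothetical, standing in for wherever the network-node-identity webhook's serving certificate is actually mounted:

    package main

    import (
    	"crypto/x509"
    	"encoding/pem"
    	"fmt"
    	"log"
    	"os"
    	"time"
    )

    func main() {
    	// Hypothetical path; substitute the webhook's real serving cert.
    	data, err := os.ReadFile("/tmp/serving-cert.pem")
    	if err != nil {
    		log.Fatal(err)
    	}
    	block, _ := pem.Decode(data)
    	if block == nil {
    		log.Fatal("no PEM block found")
    	}
    	cert, err := x509.ParseCertificate(block.Bytes)
    	if err != nil {
    		log.Fatal(err)
    	}
    	now := time.Now().UTC()
    	// Same validity window the TLS verifier enforces during the handshake.
    	if now.Before(cert.NotBefore) || now.After(cert.NotAfter) {
    		fmt.Printf("x509: certificate has expired or is not yet valid: current time %s is after %s\n",
    			now.Format(time.RFC3339), cert.NotAfter.Format(time.RFC3339))
    	}
    }
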
Has your network provider started?"} Dec 10 12:57:56 crc kubenswrapper[4921]: I1210 12:57:56.846093 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"354355f7-6630-49a8-bdc5-5e875feecb7f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22c45fd7d4d0bb91e995e76a0d813660f9b488a4765e3a21eab2485e1ff03ff3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dbm9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27975eaa70887a1e6ec3bc21ce170bbe5dfe5a05172264be8c8bd343aea02998\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dbm9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-vn2n6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:56Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:56 crc kubenswrapper[4921]: I1210 12:57:56.856687 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bd245e67c99943297f64701eba8772143dc206caf67849eaf2f9a8e82dab0d26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:56Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:56 crc kubenswrapper[4921]: I1210 12:57:56.866261 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jskgz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ad003cc-9fcc-4fc6-86b9-247b30013c0a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8c0dc3ea5672198c430f12ce59b7f2a66100fe52e0f7b4552deba97144250d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m875h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:11Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jskgz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:56Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:56 crc kubenswrapper[4921]: I1210 12:57:56.877594 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ec9bd81-b3fb-41db-acd3-2aff9c4f1c91\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8db8d5b587ec546f8a084fec36cbe7f89aa6998f4bc6dbd1bb9fd22a35f1384e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://efb9127602fc13526bf57fef51bb7814039a1507cee77693ee2723ffc18620bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d0195b43f37c1b874a0daf78d2a91ec39fd64c275503d9f4ec64b74a0d8e423\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://14f71d57ff5277b39ee8d2960b247b98bd9d7ee9993d0fdcb6338c2386b1bc6f\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://14f71d57ff5277b39ee8d2960b247b98bd9d7ee9993d0fdcb6338c2386b1bc6f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:56Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:56 crc kubenswrapper[4921]: I1210 12:57:56.889620 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2789e9cd1bca4abecf0939aad4a5f63bdc250a525ad3664bc2440e8b0b7a834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:56Z is after 
2025-08-24T17:21:41Z" Dec 10 12:57:56 crc kubenswrapper[4921]: I1210 12:57:56.899744 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zmks6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f2626c5-78df-45d2-8970-c4f99790a0fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d43ebe41a779225842dfa1c4d3be01575113b67ada9be07f553df1514e9dcf85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ft9kj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zmks6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:56Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:56 crc kubenswrapper[4921]: I1210 12:57:56.912171 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pqlx4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78a25a5392eaeaa8d5c01232765dce78525cb15225f6d54f198f5ec652979da4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://480da3b2621712c4562f9423dc98fdbf17a9dc45365f129777611bc7e934c709\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-10T12:57:54Z\\\",\\\"message\\\":\\\"2025-12-10T12:57:09+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_18fde2ae-40b8-48ee-8374-0eb68c5a88c2\\\\n2025-12-10T12:57:09+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_18fde2ae-40b8-48ee-8374-0eb68c5a88c2 to /host/opt/cni/bin/\\\\n2025-12-10T12:57:09Z [verbose] multus-daemon started\\\\n2025-12-10T12:57:09Z [verbose] Readiness Indicator file check\\\\n2025-12-10T12:57:54Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lhs2m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pqlx4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:56Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:56 crc kubenswrapper[4921]: I1210 12:57:56.930831 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50684108-04fc-405c-82be-d21d16cd650b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8fd269a96475df9dccf2f7bd0ffae831f397f49232f5c22df67903b9b8b8161e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a88b1b9101bc4ab339d394df337e4e11ec8af98b44b621bcb84eed1a0fba3d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://933c0c81aa0aa2d676a6e404f883a7c81240ef7b07a2e794878c85994d0eb88f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f8f888f214898cb28563da7a77267781622df1f2231c27d1fbdee617ada1ec2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27956424405bdf6223a96b8fd91b5152276a1501c3de2e07dfafc8b3329a6063\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59a0f3962237d723e5aa9044de1ddce3673ae1fb4c9e5e0478cd41daa661f6d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-s
ocket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://551dc5f4c39c06d2143805320061efc95d84e870eeecf23b3a64d829653810ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://551dc5f4c39c06d2143805320061efc95d84e870eeecf23b3a64d829653810ed\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-10T12:57:36Z\\\",\\\"message\\\":\\\"1210 12:57:35.655418 6472 ovn.go:134] Ensuring zone local for Pod openshift-ovn-kubernetes/ovnkube-node-m7n89 in node crc\\\\nI1210 12:57:35.655423 6472 obj_retry.go:386] Retry successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-node-m7n89 after 0 failed attempt(s)\\\\nI1210 12:57:35.655419 6472 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-console/console\\\\\\\"}\\\\nI1210 12:57:35.655437 6472 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1210 12:57:35.655454 6472 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-api/cluster-autoscaler-operator\\\\\\\"}\\\\nI1210 12:57:35.655469 6472 services_controller.go:360] Finished syncing service cluster-autoscaler-operator on namespace openshift-machine-api for network=default : 3.746055ms\\\\nI1210 12:57:35.655482 6472 services_controller.go:356] Processing sync for service openshift-apiserver-operator/metrics for network=default\\\\nF1210 12:57:35.655502 6472 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:34Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-m7n89_openshift-ovn-kubernetes(50684108-04fc-405c-82be-d21d16cd650b)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd0025f5be6e68aba73c349dd732281dead920b7d8c2d307b4a67cfdafb99119\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34932b230bb26e6c4b1bdf433827ce608df8658f6fb76140a4f0ac680dc1d43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://34932b230bb26e6c4b1bdf433827ce608df8658f6fb76140a4f0ac680dc1d43f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-m7n89\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:56Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:56 crc kubenswrapper[4921]: I1210 12:57:56.942241 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wwrv2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6b3380f-1dd4-45de-9c44-eaa37f965801\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://18de9b421542cbc73b0797f1d6e6e6752b88c3f802e5f2fd16d303de041ac72a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqtvp
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49ddf0d56e11ffafc30ec8b0065dd6ef3c3decdbf696e169013572c830f6557c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqtvp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:21Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wwrv2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:56Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:56 crc kubenswrapper[4921]: I1210 12:57:56.943821 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:56 crc kubenswrapper[4921]: I1210 12:57:56.943855 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:56 crc kubenswrapper[4921]: I1210 12:57:56.943868 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:56 crc kubenswrapper[4921]: I1210 12:57:56.943886 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:56 crc kubenswrapper[4921]: I1210 12:57:56.943901 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:56Z","lastTransitionTime":"2025-12-10T12:57:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:56 crc kubenswrapper[4921]: I1210 12:57:56.953131 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-j2nnf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9cc656f0-ce36-474b-9fa3-1ce9f43675a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9vnm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9vnm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:22Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-j2nnf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:57:56Z is after 2025-08-24T17:21:41Z" Dec 10 12:57:57 crc kubenswrapper[4921]: I1210 12:57:57.046444 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:57 crc kubenswrapper[4921]: I1210 12:57:57.046475 4921 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:57 crc kubenswrapper[4921]: I1210 12:57:57.046482 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:57 crc kubenswrapper[4921]: I1210 12:57:57.046497 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:57 crc kubenswrapper[4921]: I1210 12:57:57.046506 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:57Z","lastTransitionTime":"2025-12-10T12:57:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:57 crc kubenswrapper[4921]: I1210 12:57:57.149263 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:57 crc kubenswrapper[4921]: I1210 12:57:57.149531 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:57 crc kubenswrapper[4921]: I1210 12:57:57.149648 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:57 crc kubenswrapper[4921]: I1210 12:57:57.149822 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:57 crc kubenswrapper[4921]: I1210 12:57:57.149917 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:57Z","lastTransitionTime":"2025-12-10T12:57:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:57 crc kubenswrapper[4921]: I1210 12:57:57.252057 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:57 crc kubenswrapper[4921]: I1210 12:57:57.252085 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:57 crc kubenswrapper[4921]: I1210 12:57:57.252092 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:57 crc kubenswrapper[4921]: I1210 12:57:57.252105 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:57 crc kubenswrapper[4921]: I1210 12:57:57.252115 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:57Z","lastTransitionTime":"2025-12-10T12:57:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:57 crc kubenswrapper[4921]: I1210 12:57:57.355487 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:57 crc kubenswrapper[4921]: I1210 12:57:57.355558 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:57 crc kubenswrapper[4921]: I1210 12:57:57.355576 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:57 crc kubenswrapper[4921]: I1210 12:57:57.355601 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:57 crc kubenswrapper[4921]: I1210 12:57:57.355619 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:57Z","lastTransitionTime":"2025-12-10T12:57:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:57 crc kubenswrapper[4921]: I1210 12:57:57.458584 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:57 crc kubenswrapper[4921]: I1210 12:57:57.458646 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:57 crc kubenswrapper[4921]: I1210 12:57:57.458663 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:57 crc kubenswrapper[4921]: I1210 12:57:57.458692 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:57 crc kubenswrapper[4921]: I1210 12:57:57.458712 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:57Z","lastTransitionTime":"2025-12-10T12:57:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:57 crc kubenswrapper[4921]: I1210 12:57:57.562373 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:57 crc kubenswrapper[4921]: I1210 12:57:57.562478 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:57 crc kubenswrapper[4921]: I1210 12:57:57.562496 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:57 crc kubenswrapper[4921]: I1210 12:57:57.562522 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:57 crc kubenswrapper[4921]: I1210 12:57:57.562541 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:57Z","lastTransitionTime":"2025-12-10T12:57:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
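
The "back-off 20s restarting failed container=ovnkube-controller" state recorded earlier follows the kubelet's standard restart backoff: roughly 10s after the first failure, doubling on each subsequent failure, capped at five minutes, which matches restartCount 2 pairing with a 20s delay. A small sketch of that schedule (the constants mirror the kubelet's documented defaults; an illustration, not its actual code):

    package main

    import (
    	"fmt"
    	"time"
    )

    func main() {
    	const (
    		initial = 10 * time.Second // kubelet's default initial backoff
    		max     = 5 * time.Minute  // kubelet's default cap
    	)
    	d := initial
    	for restart := 1; restart <= 8; restart++ {
    		fmt.Printf("after failure %d: back-off %s\n", restart, d)
    		d *= 2
    		if d > max {
    			d = max
    		}
    	}
    }
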
Has your network provider started?"} Dec 10 12:57:57 crc kubenswrapper[4921]: I1210 12:57:57.665888 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:57 crc kubenswrapper[4921]: I1210 12:57:57.666310 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:57 crc kubenswrapper[4921]: I1210 12:57:57.666551 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:57 crc kubenswrapper[4921]: I1210 12:57:57.666743 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:57 crc kubenswrapper[4921]: I1210 12:57:57.667742 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:57Z","lastTransitionTime":"2025-12-10T12:57:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:57 crc kubenswrapper[4921]: I1210 12:57:57.770824 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:57 crc kubenswrapper[4921]: I1210 12:57:57.770879 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:57 crc kubenswrapper[4921]: I1210 12:57:57.770894 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:57 crc kubenswrapper[4921]: I1210 12:57:57.770916 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:57 crc kubenswrapper[4921]: I1210 12:57:57.770931 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:57Z","lastTransitionTime":"2025-12-10T12:57:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:57 crc kubenswrapper[4921]: I1210 12:57:57.874599 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:57 crc kubenswrapper[4921]: I1210 12:57:57.874647 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:57 crc kubenswrapper[4921]: I1210 12:57:57.874658 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:57 crc kubenswrapper[4921]: I1210 12:57:57.874696 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:57 crc kubenswrapper[4921]: I1210 12:57:57.874712 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:57Z","lastTransitionTime":"2025-12-10T12:57:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:57 crc kubenswrapper[4921]: I1210 12:57:57.977852 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:57 crc kubenswrapper[4921]: I1210 12:57:57.977931 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:57 crc kubenswrapper[4921]: I1210 12:57:57.977948 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:57 crc kubenswrapper[4921]: I1210 12:57:57.977978 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:57 crc kubenswrapper[4921]: I1210 12:57:57.978000 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:57Z","lastTransitionTime":"2025-12-10T12:57:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:58 crc kubenswrapper[4921]: I1210 12:57:58.081281 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:58 crc kubenswrapper[4921]: I1210 12:57:58.081377 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:58 crc kubenswrapper[4921]: I1210 12:57:58.081447 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:58 crc kubenswrapper[4921]: I1210 12:57:58.081528 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:58 crc kubenswrapper[4921]: I1210 12:57:58.081596 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:58Z","lastTransitionTime":"2025-12-10T12:57:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:58 crc kubenswrapper[4921]: I1210 12:57:58.184953 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:58 crc kubenswrapper[4921]: I1210 12:57:58.185005 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:58 crc kubenswrapper[4921]: I1210 12:57:58.185024 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:58 crc kubenswrapper[4921]: I1210 12:57:58.185049 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:58 crc kubenswrapper[4921]: I1210 12:57:58.185072 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:58Z","lastTransitionTime":"2025-12-10T12:57:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:58 crc kubenswrapper[4921]: I1210 12:57:58.192621 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 12:57:58 crc kubenswrapper[4921]: E1210 12:57:58.192798 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 12:57:58 crc kubenswrapper[4921]: I1210 12:57:58.193106 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 12:57:58 crc kubenswrapper[4921]: E1210 12:57:58.193216 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 12:57:58 crc kubenswrapper[4921]: I1210 12:57:58.193508 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 12:57:58 crc kubenswrapper[4921]: E1210 12:57:58.193619 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 12:57:58 crc kubenswrapper[4921]: I1210 12:57:58.193760 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j2nnf" Dec 10 12:57:58 crc kubenswrapper[4921]: E1210 12:57:58.194035 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-j2nnf" podUID="9cc656f0-ce36-474b-9fa3-1ce9f43675a4" Dec 10 12:57:58 crc kubenswrapper[4921]: I1210 12:57:58.288059 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:58 crc kubenswrapper[4921]: I1210 12:57:58.288124 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:58 crc kubenswrapper[4921]: I1210 12:57:58.288145 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:58 crc kubenswrapper[4921]: I1210 12:57:58.288172 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:58 crc kubenswrapper[4921]: I1210 12:57:58.288192 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:58Z","lastTransitionTime":"2025-12-10T12:57:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:58 crc kubenswrapper[4921]: I1210 12:57:58.392072 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:58 crc kubenswrapper[4921]: I1210 12:57:58.392130 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:58 crc kubenswrapper[4921]: I1210 12:57:58.392146 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:58 crc kubenswrapper[4921]: I1210 12:57:58.392176 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:58 crc kubenswrapper[4921]: I1210 12:57:58.392194 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:58Z","lastTransitionTime":"2025-12-10T12:57:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:58 crc kubenswrapper[4921]: I1210 12:57:58.494891 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:58 crc kubenswrapper[4921]: I1210 12:57:58.494965 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:58 crc kubenswrapper[4921]: I1210 12:57:58.494989 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:58 crc kubenswrapper[4921]: I1210 12:57:58.495020 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:58 crc kubenswrapper[4921]: I1210 12:57:58.495044 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:58Z","lastTransitionTime":"2025-12-10T12:57:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:58 crc kubenswrapper[4921]: I1210 12:57:58.597882 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:58 crc kubenswrapper[4921]: I1210 12:57:58.597919 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:58 crc kubenswrapper[4921]: I1210 12:57:58.597928 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:58 crc kubenswrapper[4921]: I1210 12:57:58.597944 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:58 crc kubenswrapper[4921]: I1210 12:57:58.597955 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:58Z","lastTransitionTime":"2025-12-10T12:57:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:58 crc kubenswrapper[4921]: I1210 12:57:58.701041 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:58 crc kubenswrapper[4921]: I1210 12:57:58.701089 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:58 crc kubenswrapper[4921]: I1210 12:57:58.701102 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:58 crc kubenswrapper[4921]: I1210 12:57:58.701120 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:58 crc kubenswrapper[4921]: I1210 12:57:58.701134 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:58Z","lastTransitionTime":"2025-12-10T12:57:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:58 crc kubenswrapper[4921]: I1210 12:57:58.804193 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:58 crc kubenswrapper[4921]: I1210 12:57:58.804234 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:58 crc kubenswrapper[4921]: I1210 12:57:58.804242 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:58 crc kubenswrapper[4921]: I1210 12:57:58.804258 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:58 crc kubenswrapper[4921]: I1210 12:57:58.804267 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:58Z","lastTransitionTime":"2025-12-10T12:57:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:58 crc kubenswrapper[4921]: I1210 12:57:58.906541 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:58 crc kubenswrapper[4921]: I1210 12:57:58.906574 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:58 crc kubenswrapper[4921]: I1210 12:57:58.906582 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:58 crc kubenswrapper[4921]: I1210 12:57:58.906596 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:58 crc kubenswrapper[4921]: I1210 12:57:58.906606 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:58Z","lastTransitionTime":"2025-12-10T12:57:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:59 crc kubenswrapper[4921]: I1210 12:57:59.009900 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:59 crc kubenswrapper[4921]: I1210 12:57:59.009955 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:59 crc kubenswrapper[4921]: I1210 12:57:59.009970 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:59 crc kubenswrapper[4921]: I1210 12:57:59.009994 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:59 crc kubenswrapper[4921]: I1210 12:57:59.010009 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:59Z","lastTransitionTime":"2025-12-10T12:57:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:59 crc kubenswrapper[4921]: I1210 12:57:59.113043 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:59 crc kubenswrapper[4921]: I1210 12:57:59.113093 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:59 crc kubenswrapper[4921]: I1210 12:57:59.113105 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:59 crc kubenswrapper[4921]: I1210 12:57:59.113123 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:59 crc kubenswrapper[4921]: I1210 12:57:59.113135 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:59Z","lastTransitionTime":"2025-12-10T12:57:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:59 crc kubenswrapper[4921]: I1210 12:57:59.216637 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:59 crc kubenswrapper[4921]: I1210 12:57:59.216693 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:59 crc kubenswrapper[4921]: I1210 12:57:59.216702 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:59 crc kubenswrapper[4921]: I1210 12:57:59.216719 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:59 crc kubenswrapper[4921]: I1210 12:57:59.216729 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:59Z","lastTransitionTime":"2025-12-10T12:57:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:59 crc kubenswrapper[4921]: I1210 12:57:59.319423 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:59 crc kubenswrapper[4921]: I1210 12:57:59.319508 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:59 crc kubenswrapper[4921]: I1210 12:57:59.319521 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:59 crc kubenswrapper[4921]: I1210 12:57:59.319538 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:59 crc kubenswrapper[4921]: I1210 12:57:59.319550 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:59Z","lastTransitionTime":"2025-12-10T12:57:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:59 crc kubenswrapper[4921]: I1210 12:57:59.422966 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:59 crc kubenswrapper[4921]: I1210 12:57:59.423032 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:59 crc kubenswrapper[4921]: I1210 12:57:59.423045 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:59 crc kubenswrapper[4921]: I1210 12:57:59.423069 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:59 crc kubenswrapper[4921]: I1210 12:57:59.423084 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:59Z","lastTransitionTime":"2025-12-10T12:57:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:59 crc kubenswrapper[4921]: I1210 12:57:59.525820 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:59 crc kubenswrapper[4921]: I1210 12:57:59.525866 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:59 crc kubenswrapper[4921]: I1210 12:57:59.525878 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:59 crc kubenswrapper[4921]: I1210 12:57:59.525898 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:59 crc kubenswrapper[4921]: I1210 12:57:59.525912 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:59Z","lastTransitionTime":"2025-12-10T12:57:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:59 crc kubenswrapper[4921]: I1210 12:57:59.628752 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:59 crc kubenswrapper[4921]: I1210 12:57:59.628811 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:59 crc kubenswrapper[4921]: I1210 12:57:59.628828 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:59 crc kubenswrapper[4921]: I1210 12:57:59.628852 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:59 crc kubenswrapper[4921]: I1210 12:57:59.628868 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:59Z","lastTransitionTime":"2025-12-10T12:57:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:57:59 crc kubenswrapper[4921]: I1210 12:57:59.731512 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:59 crc kubenswrapper[4921]: I1210 12:57:59.731589 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:59 crc kubenswrapper[4921]: I1210 12:57:59.731603 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:59 crc kubenswrapper[4921]: I1210 12:57:59.731623 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:59 crc kubenswrapper[4921]: I1210 12:57:59.731637 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:59Z","lastTransitionTime":"2025-12-10T12:57:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:59 crc kubenswrapper[4921]: I1210 12:57:59.834613 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:59 crc kubenswrapper[4921]: I1210 12:57:59.834685 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:59 crc kubenswrapper[4921]: I1210 12:57:59.834703 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:59 crc kubenswrapper[4921]: I1210 12:57:59.834733 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:59 crc kubenswrapper[4921]: I1210 12:57:59.834750 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:59Z","lastTransitionTime":"2025-12-10T12:57:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:57:59 crc kubenswrapper[4921]: I1210 12:57:59.937513 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:57:59 crc kubenswrapper[4921]: I1210 12:57:59.937562 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:57:59 crc kubenswrapper[4921]: I1210 12:57:59.937571 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:57:59 crc kubenswrapper[4921]: I1210 12:57:59.937587 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:57:59 crc kubenswrapper[4921]: I1210 12:57:59.937600 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:57:59Z","lastTransitionTime":"2025-12-10T12:57:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:58:00 crc kubenswrapper[4921]: I1210 12:58:00.040658 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:58:00 crc kubenswrapper[4921]: I1210 12:58:00.040727 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:58:00 crc kubenswrapper[4921]: I1210 12:58:00.040747 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:58:00 crc kubenswrapper[4921]: I1210 12:58:00.040770 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:58:00 crc kubenswrapper[4921]: I1210 12:58:00.040787 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:00Z","lastTransitionTime":"2025-12-10T12:58:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:58:00 crc kubenswrapper[4921]: I1210 12:58:00.144136 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:58:00 crc kubenswrapper[4921]: I1210 12:58:00.144205 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:58:00 crc kubenswrapper[4921]: I1210 12:58:00.144218 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:58:00 crc kubenswrapper[4921]: I1210 12:58:00.144248 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:58:00 crc kubenswrapper[4921]: I1210 12:58:00.144266 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:00Z","lastTransitionTime":"2025-12-10T12:58:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:58:00 crc kubenswrapper[4921]: I1210 12:58:00.191952 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j2nnf" Dec 10 12:58:00 crc kubenswrapper[4921]: I1210 12:58:00.192047 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 12:58:00 crc kubenswrapper[4921]: I1210 12:58:00.192114 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 12:58:00 crc kubenswrapper[4921]: I1210 12:58:00.192205 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 12:58:00 crc kubenswrapper[4921]: E1210 12:58:00.192237 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j2nnf" podUID="9cc656f0-ce36-474b-9fa3-1ce9f43675a4" Dec 10 12:58:00 crc kubenswrapper[4921]: E1210 12:58:00.192486 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 12:58:00 crc kubenswrapper[4921]: E1210 12:58:00.192645 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 12:58:00 crc kubenswrapper[4921]: E1210 12:58:00.192767 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 12:58:00 crc kubenswrapper[4921]: I1210 12:58:00.247527 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:58:00 crc kubenswrapper[4921]: I1210 12:58:00.247592 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:58:00 crc kubenswrapper[4921]: I1210 12:58:00.247611 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:58:00 crc kubenswrapper[4921]: I1210 12:58:00.247637 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:58:00 crc kubenswrapper[4921]: I1210 12:58:00.247655 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:00Z","lastTransitionTime":"2025-12-10T12:58:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:58:00 crc kubenswrapper[4921]: I1210 12:58:00.351047 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:58:00 crc kubenswrapper[4921]: I1210 12:58:00.351113 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:58:00 crc kubenswrapper[4921]: I1210 12:58:00.351130 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:58:00 crc kubenswrapper[4921]: I1210 12:58:00.351155 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:58:00 crc kubenswrapper[4921]: I1210 12:58:00.351173 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:00Z","lastTransitionTime":"2025-12-10T12:58:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:58:00 crc kubenswrapper[4921]: I1210 12:58:00.453970 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:58:00 crc kubenswrapper[4921]: I1210 12:58:00.454037 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:58:00 crc kubenswrapper[4921]: I1210 12:58:00.454059 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:58:00 crc kubenswrapper[4921]: I1210 12:58:00.454089 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:58:00 crc kubenswrapper[4921]: I1210 12:58:00.454113 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:00Z","lastTransitionTime":"2025-12-10T12:58:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:58:00 crc kubenswrapper[4921]: I1210 12:58:00.556925 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:58:00 crc kubenswrapper[4921]: I1210 12:58:00.556969 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:58:00 crc kubenswrapper[4921]: I1210 12:58:00.556979 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:58:00 crc kubenswrapper[4921]: I1210 12:58:00.556999 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:58:00 crc kubenswrapper[4921]: I1210 12:58:00.557009 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:00Z","lastTransitionTime":"2025-12-10T12:58:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:58:00 crc kubenswrapper[4921]: I1210 12:58:00.660856 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:58:00 crc kubenswrapper[4921]: I1210 12:58:00.660926 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:58:00 crc kubenswrapper[4921]: I1210 12:58:00.660949 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:58:00 crc kubenswrapper[4921]: I1210 12:58:00.660977 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:58:00 crc kubenswrapper[4921]: I1210 12:58:00.660998 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:00Z","lastTransitionTime":"2025-12-10T12:58:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:58:00 crc kubenswrapper[4921]: I1210 12:58:00.764744 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:58:00 crc kubenswrapper[4921]: I1210 12:58:00.764821 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:58:00 crc kubenswrapper[4921]: I1210 12:58:00.764839 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:58:00 crc kubenswrapper[4921]: I1210 12:58:00.764871 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:58:00 crc kubenswrapper[4921]: I1210 12:58:00.764890 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:00Z","lastTransitionTime":"2025-12-10T12:58:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:58:00 crc kubenswrapper[4921]: I1210 12:58:00.868060 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:58:00 crc kubenswrapper[4921]: I1210 12:58:00.868166 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:58:00 crc kubenswrapper[4921]: I1210 12:58:00.868190 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:58:00 crc kubenswrapper[4921]: I1210 12:58:00.868224 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:58:00 crc kubenswrapper[4921]: I1210 12:58:00.868246 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:00Z","lastTransitionTime":"2025-12-10T12:58:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:58:00 crc kubenswrapper[4921]: I1210 12:58:00.971351 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:58:00 crc kubenswrapper[4921]: I1210 12:58:00.971475 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:58:00 crc kubenswrapper[4921]: I1210 12:58:00.971501 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:58:00 crc kubenswrapper[4921]: I1210 12:58:00.971532 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:58:00 crc kubenswrapper[4921]: I1210 12:58:00.971556 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:00Z","lastTransitionTime":"2025-12-10T12:58:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:58:01 crc kubenswrapper[4921]: I1210 12:58:01.074887 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:58:01 crc kubenswrapper[4921]: I1210 12:58:01.074944 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:58:01 crc kubenswrapper[4921]: I1210 12:58:01.074963 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:58:01 crc kubenswrapper[4921]: I1210 12:58:01.074988 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:58:01 crc kubenswrapper[4921]: I1210 12:58:01.075006 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:01Z","lastTransitionTime":"2025-12-10T12:58:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:58:01 crc kubenswrapper[4921]: I1210 12:58:01.178459 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:58:01 crc kubenswrapper[4921]: I1210 12:58:01.178537 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:58:01 crc kubenswrapper[4921]: I1210 12:58:01.178560 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:58:01 crc kubenswrapper[4921]: I1210 12:58:01.178592 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:58:01 crc kubenswrapper[4921]: I1210 12:58:01.178615 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:01Z","lastTransitionTime":"2025-12-10T12:58:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:58:01 crc kubenswrapper[4921]: I1210 12:58:01.281871 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:58:01 crc kubenswrapper[4921]: I1210 12:58:01.281939 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:58:01 crc kubenswrapper[4921]: I1210 12:58:01.281974 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:58:01 crc kubenswrapper[4921]: I1210 12:58:01.282004 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:58:01 crc kubenswrapper[4921]: I1210 12:58:01.282027 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:01Z","lastTransitionTime":"2025-12-10T12:58:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:58:01 crc kubenswrapper[4921]: I1210 12:58:01.385779 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:58:01 crc kubenswrapper[4921]: I1210 12:58:01.385910 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:58:01 crc kubenswrapper[4921]: I1210 12:58:01.385947 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:58:01 crc kubenswrapper[4921]: I1210 12:58:01.385983 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:58:01 crc kubenswrapper[4921]: I1210 12:58:01.386008 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:01Z","lastTransitionTime":"2025-12-10T12:58:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:58:01 crc kubenswrapper[4921]: I1210 12:58:01.489228 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:58:01 crc kubenswrapper[4921]: I1210 12:58:01.489308 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:58:01 crc kubenswrapper[4921]: I1210 12:58:01.489329 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:58:01 crc kubenswrapper[4921]: I1210 12:58:01.489358 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:58:01 crc kubenswrapper[4921]: I1210 12:58:01.489378 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:01Z","lastTransitionTime":"2025-12-10T12:58:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:58:01 crc kubenswrapper[4921]: I1210 12:58:01.592655 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:58:01 crc kubenswrapper[4921]: I1210 12:58:01.592843 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:58:01 crc kubenswrapper[4921]: I1210 12:58:01.592867 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:58:01 crc kubenswrapper[4921]: I1210 12:58:01.592891 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:58:01 crc kubenswrapper[4921]: I1210 12:58:01.592910 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:01Z","lastTransitionTime":"2025-12-10T12:58:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:58:01 crc kubenswrapper[4921]: I1210 12:58:01.696223 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:58:01 crc kubenswrapper[4921]: I1210 12:58:01.696522 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:58:01 crc kubenswrapper[4921]: I1210 12:58:01.696582 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:58:01 crc kubenswrapper[4921]: I1210 12:58:01.696650 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:58:01 crc kubenswrapper[4921]: I1210 12:58:01.696743 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:01Z","lastTransitionTime":"2025-12-10T12:58:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:58:01 crc kubenswrapper[4921]: I1210 12:58:01.801931 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:58:01 crc kubenswrapper[4921]: I1210 12:58:01.801981 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:58:01 crc kubenswrapper[4921]: I1210 12:58:01.801993 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:58:01 crc kubenswrapper[4921]: I1210 12:58:01.802014 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:58:01 crc kubenswrapper[4921]: I1210 12:58:01.802027 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:01Z","lastTransitionTime":"2025-12-10T12:58:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:58:01 crc kubenswrapper[4921]: I1210 12:58:01.905190 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:58:01 crc kubenswrapper[4921]: I1210 12:58:01.905253 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:58:01 crc kubenswrapper[4921]: I1210 12:58:01.905270 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:58:01 crc kubenswrapper[4921]: I1210 12:58:01.905293 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:58:01 crc kubenswrapper[4921]: I1210 12:58:01.905310 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:01Z","lastTransitionTime":"2025-12-10T12:58:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:58:02 crc kubenswrapper[4921]: I1210 12:58:02.008578 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:58:02 crc kubenswrapper[4921]: I1210 12:58:02.008634 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:58:02 crc kubenswrapper[4921]: I1210 12:58:02.008715 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:58:02 crc kubenswrapper[4921]: I1210 12:58:02.008742 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:58:02 crc kubenswrapper[4921]: I1210 12:58:02.008761 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:02Z","lastTransitionTime":"2025-12-10T12:58:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:58:02 crc kubenswrapper[4921]: I1210 12:58:02.112029 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:58:02 crc kubenswrapper[4921]: I1210 12:58:02.112100 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:58:02 crc kubenswrapper[4921]: I1210 12:58:02.112128 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:58:02 crc kubenswrapper[4921]: I1210 12:58:02.112185 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:58:02 crc kubenswrapper[4921]: I1210 12:58:02.112206 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:02Z","lastTransitionTime":"2025-12-10T12:58:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:58:02 crc kubenswrapper[4921]: I1210 12:58:02.191889 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 12:58:02 crc kubenswrapper[4921]: E1210 12:58:02.192124 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 12:58:02 crc kubenswrapper[4921]: I1210 12:58:02.192277 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 12:58:02 crc kubenswrapper[4921]: E1210 12:58:02.192468 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 12:58:02 crc kubenswrapper[4921]: I1210 12:58:02.192551 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 12:58:02 crc kubenswrapper[4921]: E1210 12:58:02.192636 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 12:58:02 crc kubenswrapper[4921]: I1210 12:58:02.192711 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j2nnf" Dec 10 12:58:02 crc kubenswrapper[4921]: E1210 12:58:02.192960 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-j2nnf" podUID="9cc656f0-ce36-474b-9fa3-1ce9f43675a4" Dec 10 12:58:02 crc kubenswrapper[4921]: I1210 12:58:02.215319 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:58:02 crc kubenswrapper[4921]: I1210 12:58:02.215383 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:58:02 crc kubenswrapper[4921]: I1210 12:58:02.215454 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:58:02 crc kubenswrapper[4921]: I1210 12:58:02.215483 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:58:02 crc kubenswrapper[4921]: I1210 12:58:02.215502 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:02Z","lastTransitionTime":"2025-12-10T12:58:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:58:02 crc kubenswrapper[4921]: I1210 12:58:02.318851 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:58:02 crc kubenswrapper[4921]: I1210 12:58:02.318899 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:58:02 crc kubenswrapper[4921]: I1210 12:58:02.318911 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:58:02 crc kubenswrapper[4921]: I1210 12:58:02.318927 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:58:02 crc kubenswrapper[4921]: I1210 12:58:02.318937 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:02Z","lastTransitionTime":"2025-12-10T12:58:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:58:02 crc kubenswrapper[4921]: I1210 12:58:02.423452 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:58:02 crc kubenswrapper[4921]: I1210 12:58:02.423509 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:58:02 crc kubenswrapper[4921]: I1210 12:58:02.423525 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:58:02 crc kubenswrapper[4921]: I1210 12:58:02.423545 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:58:02 crc kubenswrapper[4921]: I1210 12:58:02.423560 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:02Z","lastTransitionTime":"2025-12-10T12:58:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:58:02 crc kubenswrapper[4921]: I1210 12:58:02.527456 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:58:02 crc kubenswrapper[4921]: I1210 12:58:02.527523 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:58:02 crc kubenswrapper[4921]: I1210 12:58:02.527543 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:58:02 crc kubenswrapper[4921]: I1210 12:58:02.527571 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:58:02 crc kubenswrapper[4921]: I1210 12:58:02.527592 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:02Z","lastTransitionTime":"2025-12-10T12:58:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:58:02 crc kubenswrapper[4921]: I1210 12:58:02.631167 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:58:02 crc kubenswrapper[4921]: I1210 12:58:02.631453 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:58:02 crc kubenswrapper[4921]: I1210 12:58:02.631470 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:58:02 crc kubenswrapper[4921]: I1210 12:58:02.631493 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:58:02 crc kubenswrapper[4921]: I1210 12:58:02.631509 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:02Z","lastTransitionTime":"2025-12-10T12:58:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:58:02 crc kubenswrapper[4921]: I1210 12:58:02.734351 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:58:02 crc kubenswrapper[4921]: I1210 12:58:02.734410 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:58:02 crc kubenswrapper[4921]: I1210 12:58:02.734423 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:58:02 crc kubenswrapper[4921]: I1210 12:58:02.734440 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:58:02 crc kubenswrapper[4921]: I1210 12:58:02.734451 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:02Z","lastTransitionTime":"2025-12-10T12:58:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:58:02 crc kubenswrapper[4921]: I1210 12:58:02.837823 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:58:02 crc kubenswrapper[4921]: I1210 12:58:02.837888 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:58:02 crc kubenswrapper[4921]: I1210 12:58:02.837911 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:58:02 crc kubenswrapper[4921]: I1210 12:58:02.837945 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:58:02 crc kubenswrapper[4921]: I1210 12:58:02.837963 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:02Z","lastTransitionTime":"2025-12-10T12:58:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:58:02 crc kubenswrapper[4921]: I1210 12:58:02.941050 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:58:02 crc kubenswrapper[4921]: I1210 12:58:02.941091 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:58:02 crc kubenswrapper[4921]: I1210 12:58:02.941102 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:58:02 crc kubenswrapper[4921]: I1210 12:58:02.941121 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:58:02 crc kubenswrapper[4921]: I1210 12:58:02.941134 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:02Z","lastTransitionTime":"2025-12-10T12:58:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:58:03 crc kubenswrapper[4921]: I1210 12:58:03.044507 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:58:03 crc kubenswrapper[4921]: I1210 12:58:03.044561 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:58:03 crc kubenswrapper[4921]: I1210 12:58:03.044573 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:58:03 crc kubenswrapper[4921]: I1210 12:58:03.044592 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:58:03 crc kubenswrapper[4921]: I1210 12:58:03.044608 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:03Z","lastTransitionTime":"2025-12-10T12:58:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:58:03 crc kubenswrapper[4921]: I1210 12:58:03.147999 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:58:03 crc kubenswrapper[4921]: I1210 12:58:03.148059 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:58:03 crc kubenswrapper[4921]: I1210 12:58:03.148074 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:58:03 crc kubenswrapper[4921]: I1210 12:58:03.148097 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:58:03 crc kubenswrapper[4921]: I1210 12:58:03.148112 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:03Z","lastTransitionTime":"2025-12-10T12:58:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:58:03 crc kubenswrapper[4921]: I1210 12:58:03.219967 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"371fafdc-aa16-4608-aaa2-e419c4ddbc18\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b9a190a657ca03f3fb08626b7af512164ff131b1783b903a02005a111a7036c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57991b0cb6fd4b37082ff5d4eecc6227d77f241e9a983cd3e0eb9db5b485865f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c24d974446ee70bf587bf3969542cda98f062a9cc78b6af73005d9b8d0a6ee02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5a3f231014293fc0412e577cf9840f62f8db869ea4f0f8bef1bfc5112b38cf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://17a6158acd097054719316d2ad29dc036546d3951bb1e8dd010618f9155270a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://534968b5f5d9e7b3063c91a3e0b68ba04d83e2cb65ab688b23d284adc6852155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://534968b5f5d9e7b3063c91a3e0b68ba04d83e2cb65ab688b23d284adc6852155\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0acb3ca5fa3945c89412f466b00193354c94ce56dbba608c104d3baf555a2c3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0acb3ca5fa3945c89412f466b00193354c94ce56dbba608c104d3baf555a2c3a\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b7d1b714acf0f278cc0310204225d417266a241f1ea827dc625f7b89a7d0ebac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b7d1b714acf0f278cc0310204225d417266a241f1ea827dc625f7b89a7d0ebac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:58:03Z is after 2025-08-24T17:21:41Z" Dec 10 12:58:03 crc kubenswrapper[4921]: I1210 12:58:03.233885 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"02726135-3050-46a1-a3ab-b2ce46cdb75d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12292f0529bcf32fb33e5accfbd0dfd7d53e377a9ee2046d4ca6efc78fe1c31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a4716beddbcd24e8418830aa5494cffffc21272e45e30bd15cfe58bfc07c543\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f66fe2144cde40619405c04d7d83cbcc2e78503401df428502abad1682d4cb7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4244835c7f038a7c1bf4820de49854350a23fac13c5a252a1553f6508594f10e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:58:03Z is after 2025-08-24T17:21:41Z" Dec 10 12:58:03 crc kubenswrapper[4921]: I1210 12:58:03.249075 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:58:03Z is after 2025-08-24T17:21:41Z" Dec 10 12:58:03 crc kubenswrapper[4921]: I1210 12:58:03.252853 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:58:03 crc kubenswrapper[4921]: I1210 12:58:03.252920 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:58:03 crc kubenswrapper[4921]: I1210 12:58:03.252937 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:58:03 crc kubenswrapper[4921]: I1210 12:58:03.252960 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:58:03 crc kubenswrapper[4921]: I1210 12:58:03.252976 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:03Z","lastTransitionTime":"2025-12-10T12:58:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:58:03 crc kubenswrapper[4921]: I1210 12:58:03.270633 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://307b845aae3352df08e2f9fd394f4110a37b2a21650593ebb584c5bf37d01397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3be8a498516e12174c8b5612669fd69deef610c01ed9884a5228cd436bbae3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:58:03Z is after 2025-08-24T17:21:41Z" Dec 10 12:58:03 crc kubenswrapper[4921]: I1210 12:58:03.287838 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-86bpd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"709b4982-f2e6-4692-ab1a-c1d5b7d507ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c842f48ca574d23a086e1b248c17102895f4f45897ac87ddcc1f98f170a22bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34d9e720fab0818e4cdf1e2a4da042a5648c7c396fedf17b395ad07ececd5c9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://34d9e720fab0818e4cdf1e2a4da042a5648c7c396fedf17b395ad07ececd5c9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04723fc3840c9d632dae527a5afa04fc7eea858426056da3dfe8e72186198ab1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://04723fc3840c9d632dae527a5afa04fc7eea858426056da3dfe8e72186198ab1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://adf25ba213f519cad3c21233c0f3d2a383d978543da8ea1db41bb60dd29f9f3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://adf25ba213f519cad3c21233c0f3d2a383d978543da8ea1db41bb60dd29f9f3e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://baa63608618bc4f059414317df70f14a33321d5aed291adc02a9daac92cf5428\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baa63608618bc4f059414317df70f14a33321d5aed291adc02a9daac92cf5428\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e54a218f367591b87841a39399d5889344b8b92fcc70d77105a0191d3dba37c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e54a218f367591b87841a39399d5889344b8b92fcc70d77105a0191d3dba37c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d90adbff8edcd85eebe4858e412769dff7a05b05bbe7fc533906b55e6ee415e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d90adbff8edcd85eebe4858e412769dff7a05b05bbe7fc533906b55e6ee415e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z76j8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-86bpd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:58:03Z is after 2025-08-24T17:21:41Z" Dec 10 12:58:03 crc kubenswrapper[4921]: I1210 12:58:03.307817 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:58:03Z is after 2025-08-24T17:21:41Z" Dec 10 12:58:03 crc kubenswrapper[4921]: I1210 12:58:03.327763 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:58:03Z is after 2025-08-24T17:21:41Z" Dec 10 12:58:03 crc kubenswrapper[4921]: I1210 12:58:03.339885 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"354355f7-6630-49a8-bdc5-5e875feecb7f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22c45fd7d4d0bb91e995e76a0d813660f9b488a4765e3a21eab2485e1ff03ff3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dbm9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27975eaa70887a1e6ec3bc21ce170bbe5dfe5a05172264be8c8bd343aea02998\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dbm9s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-vn2n6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:58:03Z is after 2025-08-24T17:21:41Z" Dec 10 12:58:03 crc kubenswrapper[4921]: I1210 12:58:03.353565 4921 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f57208b0-80bc-4c1b-bbab-9d2f858972f6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0534394a39803e8a7555e29d0770b5ac7f9197a5f0e03bec4c5460d77fffdd14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6eaca0cb438e61f0856ed7dc64256ccd02aee8dac014d1f5e9cd8aa180c736fb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://692a4c4828dc74b1bfb948f58fab96ee6674030cb9009c72f30f9eae482eb682\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f534d6390920d177e185001b28f7ece42d82a0da922
b4aaf174c271dbe975c50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b39874b20cdccc7903753342421a1f7e13b7e99a2cb699a7c0e44226aebd4f4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-10T12:57:01Z\\\",\\\"message\\\":\\\"et denominator for mutating requests\\\\\\\" limit=200\\\\nI1210 12:57:01.294872 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1210 12:57:01.294893 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1210 12:57:01.294918 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 12:57:01.294926 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1210 12:57:01.294932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1210 12:57:01.294934 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1210 12:57:01.294938 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1210 12:57:01.294941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1210 12:57:01.301734 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2446357718/tls.crt::/tmp/serving-cert-2446357718/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1765371405\\\\\\\\\\\\\\\" (2025-12-10 12:56:44 +0000 UTC to 2026-01-09 12:56:45 +0000 UTC (now=2025-12-10 12:57:01.30169166 +0000 UTC))\\\\\\\"\\\\nI1210 12:57:01.301889 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1765371416\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1765371416\\\\\\\\\\\\\\\" (2025-12-10 11:56:55 +0000 UTC to 2026-12-10 11:56:55 +0000 UTC (now=2025-12-10 12:57:01.301865574 +0000 UTC))\\\\\\\"\\\\nI1210 12:57:01.301907 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1210 12:57:01.301934 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nF1210 12:57:01.302850 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e5afbcb1ea81c3f9ec4152ef614a3f07ba1ded75c774c467e968f9c3ee72e33\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bee74fc4c681cc10c5a460c807659272e393e19173109e82ef65371c5b363ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bee74fc4c681cc10c5a460c807659272e393e19173109e82ef65371c5b363ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:58:03Z is after 2025-08-24T17:21:41Z" Dec 10 12:58:03 crc kubenswrapper[4921]: I1210 12:58:03.355804 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:58:03 crc kubenswrapper[4921]: I1210 12:58:03.355844 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:58:03 crc kubenswrapper[4921]: I1210 12:58:03.355856 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:58:03 crc kubenswrapper[4921]: I1210 12:58:03.355877 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:58:03 crc kubenswrapper[4921]: I1210 12:58:03.355890 4921 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:03Z","lastTransitionTime":"2025-12-10T12:58:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:58:03 crc kubenswrapper[4921]: I1210 12:58:03.364309 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jskgz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ad003cc-9fcc-4fc6-86b9-247b30013c0a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8c0dc3ea5672198c430f12ce59b7f2a66100fe52e0f7b4552deba97144250d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m875h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:11Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jskgz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:58:03Z is after 2025-08-24T17:21:41Z" Dec 10 12:58:03 crc kubenswrapper[4921]: I1210 12:58:03.377300 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bd245e67c99943297f64701eba8772143dc206caf67849eaf2f9a8e82dab0d26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:58:03Z is after 2025-08-24T17:21:41Z" Dec 10 12:58:03 crc kubenswrapper[4921]: I1210 12:58:03.394123 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2789e9cd1bca4abecf0939aad4a5f63bdc250a525ad3664bc2440e8b0b7a834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:58:03Z is after 2025-08-24T17:21:41Z" Dec 10 12:58:03 crc kubenswrapper[4921]: I1210 12:58:03.407790 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zmks6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f2626c5-78df-45d2-8970-c4f99790a0fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d43ebe41a779225842dfa1c4d3be01575113b67ada9be07f553df1514e9dcf85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ft9kj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zmks6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:58:03Z is after 2025-08-24T17:21:41Z" Dec 10 12:58:03 crc kubenswrapper[4921]: I1210 12:58:03.422221 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pqlx4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78a25a5392eaeaa8d5c01232765dce78525cb15225f6d54f198f5ec652979da4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://480da3b2621712c4562f9423dc98fdbf17a9dc45365f129777611bc7e934c709\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-10T12:57:54Z\\\",\\\"message\\\":\\\"2025-12-10T12:57:09+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_18fde2ae-40b8-48ee-8374-0eb68c5a88c2\\\\n2025-12-10T12:57:09+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_18fde2ae-40b8-48ee-8374-0eb68c5a88c2 to /host/opt/cni/bin/\\\\n2025-12-10T12:57:09Z [verbose] multus-daemon started\\\\n2025-12-10T12:57:09Z [verbose] Readiness Indicator file check\\\\n2025-12-10T12:57:54Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lhs2m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pqlx4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:58:03Z is after 2025-08-24T17:21:41Z" Dec 10 12:58:03 crc kubenswrapper[4921]: I1210 12:58:03.439112 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50684108-04fc-405c-82be-d21d16cd650b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:08Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8fd269a96475df9dccf2f7bd0ffae831f397f49232f5c22df67903b9b8b8161e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a88b1b9101bc4ab339d394df337e4e11ec8af98b44b621bcb84eed1a0fba3d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://933c0c81aa0aa2d676a6e404f883a7c81240ef7b07a2e794878c85994d0eb88f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f8f888f214898cb28563da7a77267781622df1f2231c27d1fbdee617ada1ec2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27956424405bdf6223a96b8fd91b5152276a1501c3de2e07dfafc8b3329a6063\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59a0f3962237d723e5aa9044de1ddce3673ae1fb4c9e5e0478cd41daa661f6d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-s
ocket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://551dc5f4c39c06d2143805320061efc95d84e870eeecf23b3a64d829653810ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://551dc5f4c39c06d2143805320061efc95d84e870eeecf23b3a64d829653810ed\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-10T12:57:36Z\\\",\\\"message\\\":\\\"1210 12:57:35.655418 6472 ovn.go:134] Ensuring zone local for Pod openshift-ovn-kubernetes/ovnkube-node-m7n89 in node crc\\\\nI1210 12:57:35.655423 6472 obj_retry.go:386] Retry successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-node-m7n89 after 0 failed attempt(s)\\\\nI1210 12:57:35.655419 6472 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-console/console\\\\\\\"}\\\\nI1210 12:57:35.655437 6472 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1210 12:57:35.655454 6472 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-api/cluster-autoscaler-operator\\\\\\\"}\\\\nI1210 12:57:35.655469 6472 services_controller.go:360] Finished syncing service cluster-autoscaler-operator on namespace openshift-machine-api for network=default : 3.746055ms\\\\nI1210 12:57:35.655482 6472 services_controller.go:356] Processing sync for service openshift-apiserver-operator/metrics for network=default\\\\nF1210 12:57:35.655502 6472 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:34Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-m7n89_openshift-ovn-kubernetes(50684108-04fc-405c-82be-d21d16cd650b)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd0025f5be6e68aba73c349dd732281dead920b7d8c2d307b4a67cfdafb99119\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34932b230bb26e6c4b1bdf433827ce608df8658f6fb76140a4f0ac680dc1d43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://34932b230bb26e6c4b1bdf433827ce608df8658f6fb76140a4f0ac680dc1d43f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:57:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ddcs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:08Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-m7n89\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:58:03Z is after 2025-08-24T17:21:41Z" Dec 10 12:58:03 crc kubenswrapper[4921]: I1210 12:58:03.451329 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wwrv2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6b3380f-1dd4-45de-9c44-eaa37f965801\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://18de9b421542cbc73b0797f1d6e6e6752b88c3f802e5f2fd16d303de041ac72a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqtvp
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49ddf0d56e11ffafc30ec8b0065dd6ef3c3decdbf696e169013572c830f6557c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:57:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqtvp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:21Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wwrv2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:58:03Z is after 2025-08-24T17:21:41Z" Dec 10 12:58:03 crc kubenswrapper[4921]: I1210 12:58:03.459369 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:58:03 crc kubenswrapper[4921]: I1210 12:58:03.459432 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:58:03 crc kubenswrapper[4921]: I1210 12:58:03.459445 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:58:03 crc kubenswrapper[4921]: I1210 12:58:03.459466 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:58:03 crc kubenswrapper[4921]: I1210 12:58:03.459481 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:03Z","lastTransitionTime":"2025-12-10T12:58:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:58:03 crc kubenswrapper[4921]: I1210 12:58:03.462269 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-j2nnf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9cc656f0-ce36-474b-9fa3-1ce9f43675a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:22Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9vnm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9vnm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:57:22Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-j2nnf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:58:03Z is after 2025-08-24T17:21:41Z" Dec 10 12:58:03 crc kubenswrapper[4921]: I1210 12:58:03.472691 4921 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ec9bd81-b3fb-41db-acd3-2aff9c4f1c91\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:57:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T12:56:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8db8d5b587ec546f8a084fec36cbe7f89aa6998f4bc6dbd1bb9fd22a35f1384e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://efb9127602fc13526bf57fef51bb7814039a1507cee77693ee2723ffc18620bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d0195b43f37c1b874a0daf78d2a91ec39fd64c275503d9f4ec64b74a0d8e423\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T12:56:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://14f71d57ff5277b39ee8d2960b247b98bd9d7ee9993d0fdcb6338c2386b1bc6f\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://14f71d57ff5277b39ee8d2960b247b98bd9d7ee9993d0fdcb6338c2386b1bc6f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T12:56:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T12:56:44Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T12:56:43Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:58:03Z is after 2025-08-24T17:21:41Z" Dec 10 12:58:03 crc kubenswrapper[4921]: I1210 12:58:03.561748 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:58:03 crc kubenswrapper[4921]: I1210 12:58:03.561794 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:58:03 crc kubenswrapper[4921]: I1210 12:58:03.561807 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:58:03 crc kubenswrapper[4921]: I1210 12:58:03.561837 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:58:03 crc kubenswrapper[4921]: I1210 12:58:03.561852 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:03Z","lastTransitionTime":"2025-12-10T12:58:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 10 12:58:03 crc kubenswrapper[4921]: I1210 12:58:03.665024 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 12:58:03 crc kubenswrapper[4921]: I1210 12:58:03.665079 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 12:58:03 crc kubenswrapper[4921]: I1210 12:58:03.665094 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 12:58:03 crc kubenswrapper[4921]: I1210 12:58:03.665117 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 12:58:03 crc kubenswrapper[4921]: I1210 12:58:03.665132 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:03Z","lastTransitionTime":"2025-12-10T12:58:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 12:58:03 crc kubenswrapper[4921]: I1210 12:58:03.768631 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 12:58:03 crc kubenswrapper[4921]: I1210 12:58:03.768697 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 12:58:03 crc kubenswrapper[4921]: I1210 12:58:03.768720 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 12:58:03 crc kubenswrapper[4921]: I1210 12:58:03.768749 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 12:58:03 crc kubenswrapper[4921]: I1210 12:58:03.768770 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:03Z","lastTransitionTime":"2025-12-10T12:58:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 12:58:03 crc kubenswrapper[4921]: I1210 12:58:03.872328 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 12:58:03 crc kubenswrapper[4921]: I1210 12:58:03.872501 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 12:58:03 crc kubenswrapper[4921]: I1210 12:58:03.872528 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 12:58:03 crc kubenswrapper[4921]: I1210 12:58:03.872561 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 12:58:03 crc kubenswrapper[4921]: I1210 12:58:03.872584 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:03Z","lastTransitionTime":"2025-12-10T12:58:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 12:58:03 crc kubenswrapper[4921]: I1210 12:58:03.975803 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 12:58:03 crc kubenswrapper[4921]: I1210 12:58:03.975885 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 12:58:03 crc kubenswrapper[4921]: I1210 12:58:03.975925 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 12:58:03 crc kubenswrapper[4921]: I1210 12:58:03.975964 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 12:58:03 crc kubenswrapper[4921]: I1210 12:58:03.976001 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:03Z","lastTransitionTime":"2025-12-10T12:58:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 12:58:04 crc kubenswrapper[4921]: I1210 12:58:04.079833 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 12:58:04 crc kubenswrapper[4921]: I1210 12:58:04.079887 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 12:58:04 crc kubenswrapper[4921]: I1210 12:58:04.079902 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 12:58:04 crc kubenswrapper[4921]: I1210 12:58:04.079923 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 12:58:04 crc kubenswrapper[4921]: I1210 12:58:04.079938 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:04Z","lastTransitionTime":"2025-12-10T12:58:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 12:58:04 crc kubenswrapper[4921]: I1210 12:58:04.184079 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 12:58:04 crc kubenswrapper[4921]: I1210 12:58:04.184134 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 12:58:04 crc kubenswrapper[4921]: I1210 12:58:04.184150 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 12:58:04 crc kubenswrapper[4921]: I1210 12:58:04.184177 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 12:58:04 crc kubenswrapper[4921]: I1210 12:58:04.184197 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:04Z","lastTransitionTime":"2025-12-10T12:58:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 12:58:04 crc kubenswrapper[4921]: I1210 12:58:04.192029 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 10 12:58:04 crc kubenswrapper[4921]: E1210 12:58:04.192304 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 10 12:58:04 crc kubenswrapper[4921]: I1210 12:58:04.192796 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j2nnf"
Dec 10 12:58:04 crc kubenswrapper[4921]: E1210 12:58:04.192953 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j2nnf" podUID="9cc656f0-ce36-474b-9fa3-1ce9f43675a4"
Dec 10 12:58:04 crc kubenswrapper[4921]: I1210 12:58:04.192991 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 10 12:58:04 crc kubenswrapper[4921]: I1210 12:58:04.193056 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 10 12:58:04 crc kubenswrapper[4921]: E1210 12:58:04.193149 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 10 12:58:04 crc kubenswrapper[4921]: E1210 12:58:04.193243 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 10 12:58:04 crc kubenswrapper[4921]: I1210 12:58:04.287734 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 12:58:04 crc kubenswrapper[4921]: I1210 12:58:04.287806 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 12:58:04 crc kubenswrapper[4921]: I1210 12:58:04.287823 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 12:58:04 crc kubenswrapper[4921]: I1210 12:58:04.287851 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 12:58:04 crc kubenswrapper[4921]: I1210 12:58:04.287870 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:04Z","lastTransitionTime":"2025-12-10T12:58:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 12:58:04 crc kubenswrapper[4921]: I1210 12:58:04.390858 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 12:58:04 crc kubenswrapper[4921]: I1210 12:58:04.390903 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 12:58:04 crc kubenswrapper[4921]: I1210 12:58:04.390918 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 12:58:04 crc kubenswrapper[4921]: I1210 12:58:04.390936 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 12:58:04 crc kubenswrapper[4921]: I1210 12:58:04.390948 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:04Z","lastTransitionTime":"2025-12-10T12:58:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"} Dec 10 12:58:04 crc kubenswrapper[4921]: I1210 12:58:04.494148 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:58:04 crc kubenswrapper[4921]: I1210 12:58:04.494212 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:58:04 crc kubenswrapper[4921]: I1210 12:58:04.494228 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:58:04 crc kubenswrapper[4921]: I1210 12:58:04.494280 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:58:04 crc kubenswrapper[4921]: I1210 12:58:04.494305 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:04Z","lastTransitionTime":"2025-12-10T12:58:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:58:04 crc kubenswrapper[4921]: I1210 12:58:04.598775 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:58:04 crc kubenswrapper[4921]: I1210 12:58:04.598843 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:58:04 crc kubenswrapper[4921]: I1210 12:58:04.598955 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:58:04 crc kubenswrapper[4921]: I1210 12:58:04.599010 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:58:04 crc kubenswrapper[4921]: I1210 12:58:04.599029 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:04Z","lastTransitionTime":"2025-12-10T12:58:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:58:04 crc kubenswrapper[4921]: I1210 12:58:04.703528 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:58:04 crc kubenswrapper[4921]: I1210 12:58:04.703608 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:58:04 crc kubenswrapper[4921]: I1210 12:58:04.703626 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:58:04 crc kubenswrapper[4921]: I1210 12:58:04.703654 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:58:04 crc kubenswrapper[4921]: I1210 12:58:04.703675 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:04Z","lastTransitionTime":"2025-12-10T12:58:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
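The KubeletNotReady condition repeated above reduces to one missing artifact: the runtime finds no CNI network configuration under /etc/kubernetes/cni/net.d/, so it reports NetworkReady=false and the kubelet keeps the node NotReady. Below is a minimal Go sketch of that kind of directory probe; it is illustrative only, not the actual kubelet/CRI-O source, and the accepted extensions follow the usual libcni conventions.

// cniprobe: check a CNI conf directory the way the log message implies.
package main

import (
        "fmt"
        "os"
        "path/filepath"
)

const cniConfDir = "/etc/kubernetes/cni/net.d" // directory named in the log message

func main() {
        entries, err := os.ReadDir(cniConfDir)
        if err != nil {
                fmt.Println("cannot read CNI conf dir:", err)
                return
        }
        var found []string
        for _, e := range entries {
                if e.IsDir() {
                        continue
                }
                switch filepath.Ext(e.Name()) {
                case ".conf", ".conflist", ".json": // extensions libcni conventionally accepts
                        found = append(found, e.Name())
                }
        }
        if len(found) == 0 {
                fmt.Println("no CNI configuration file found; network plugin not ready")
                return
        }
        fmt.Println("CNI configs:", found)
}

Once a network operator writes a *.conflist into that directory, the Ready condition would be expected to clear on a subsequent sync.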
Dec 10 12:58:04 crc kubenswrapper[4921]: I1210 12:58:04.703675 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:04Z","lastTransitionTime":"2025-12-10T12:58:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 12:58:04 crc kubenswrapper[4921]: I1210 12:58:04.806778 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 12:58:04 crc kubenswrapper[4921]: I1210 12:58:04.806826 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 12:58:04 crc kubenswrapper[4921]: I1210 12:58:04.806839 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 12:58:04 crc kubenswrapper[4921]: I1210 12:58:04.806857 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 12:58:04 crc kubenswrapper[4921]: I1210 12:58:04.806870 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:04Z","lastTransitionTime":"2025-12-10T12:58:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 12:58:04 crc kubenswrapper[4921]: I1210 12:58:04.911009 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 12:58:04 crc kubenswrapper[4921]: I1210 12:58:04.911684 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 12:58:04 crc kubenswrapper[4921]: I1210 12:58:04.911768 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 12:58:04 crc kubenswrapper[4921]: I1210 12:58:04.911840 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 12:58:04 crc kubenswrapper[4921]: I1210 12:58:04.912137 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:04Z","lastTransitionTime":"2025-12-10T12:58:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 12:58:05 crc kubenswrapper[4921]: I1210 12:58:05.016496 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 12:58:05 crc kubenswrapper[4921]: I1210 12:58:05.016586 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 12:58:05 crc kubenswrapper[4921]: I1210 12:58:05.016610 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 12:58:05 crc kubenswrapper[4921]: I1210 12:58:05.016642 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 12:58:05 crc kubenswrapper[4921]: I1210 12:58:05.016666 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:05Z","lastTransitionTime":"2025-12-10T12:58:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 12:58:05 crc kubenswrapper[4921]: I1210 12:58:05.120170 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 12:58:05 crc kubenswrapper[4921]: I1210 12:58:05.120232 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 12:58:05 crc kubenswrapper[4921]: I1210 12:58:05.120252 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 12:58:05 crc kubenswrapper[4921]: I1210 12:58:05.120278 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 12:58:05 crc kubenswrapper[4921]: I1210 12:58:05.120296 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:05Z","lastTransitionTime":"2025-12-10T12:58:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 12:58:05 crc kubenswrapper[4921]: I1210 12:58:05.206982 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"]
Dec 10 12:58:05 crc kubenswrapper[4921]: I1210 12:58:05.223723 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 12:58:05 crc kubenswrapper[4921]: I1210 12:58:05.224087 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 12:58:05 crc kubenswrapper[4921]: I1210 12:58:05.224287 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 12:58:05 crc kubenswrapper[4921]: I1210 12:58:05.224522 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 12:58:05 crc kubenswrapper[4921]: I1210 12:58:05.224723 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:05Z","lastTransitionTime":"2025-12-10T12:58:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 12:58:05 crc kubenswrapper[4921]: I1210 12:58:05.328312 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 12:58:05 crc kubenswrapper[4921]: I1210 12:58:05.328373 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 12:58:05 crc kubenswrapper[4921]: I1210 12:58:05.328422 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 12:58:05 crc kubenswrapper[4921]: I1210 12:58:05.328449 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 12:58:05 crc kubenswrapper[4921]: I1210 12:58:05.328468 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:05Z","lastTransitionTime":"2025-12-10T12:58:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 12:58:05 crc kubenswrapper[4921]: I1210 12:58:05.432601 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 12:58:05 crc kubenswrapper[4921]: I1210 12:58:05.432662 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 12:58:05 crc kubenswrapper[4921]: I1210 12:58:05.432679 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 12:58:05 crc kubenswrapper[4921]: I1210 12:58:05.432706 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 12:58:05 crc kubenswrapper[4921]: I1210 12:58:05.432726 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:05Z","lastTransitionTime":"2025-12-10T12:58:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 12:58:05 crc kubenswrapper[4921]: I1210 12:58:05.536015 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 12:58:05 crc kubenswrapper[4921]: I1210 12:58:05.536067 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 12:58:05 crc kubenswrapper[4921]: I1210 12:58:05.536120 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 12:58:05 crc kubenswrapper[4921]: I1210 12:58:05.536664 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 12:58:05 crc kubenswrapper[4921]: I1210 12:58:05.536706 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:05Z","lastTransitionTime":"2025-12-10T12:58:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 12:58:05 crc kubenswrapper[4921]: I1210 12:58:05.640274 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 12:58:05 crc kubenswrapper[4921]: I1210 12:58:05.640345 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 12:58:05 crc kubenswrapper[4921]: I1210 12:58:05.640362 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 12:58:05 crc kubenswrapper[4921]: I1210 12:58:05.640415 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 12:58:05 crc kubenswrapper[4921]: I1210 12:58:05.640434 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:05Z","lastTransitionTime":"2025-12-10T12:58:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 12:58:05 crc kubenswrapper[4921]: I1210 12:58:05.743502 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 12:58:05 crc kubenswrapper[4921]: I1210 12:58:05.743549 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 12:58:05 crc kubenswrapper[4921]: I1210 12:58:05.743557 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 12:58:05 crc kubenswrapper[4921]: I1210 12:58:05.743597 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 12:58:05 crc kubenswrapper[4921]: I1210 12:58:05.743609 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:05Z","lastTransitionTime":"2025-12-10T12:58:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 12:58:05 crc kubenswrapper[4921]: I1210 12:58:05.847134 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 12:58:05 crc kubenswrapper[4921]: I1210 12:58:05.847186 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 12:58:05 crc kubenswrapper[4921]: I1210 12:58:05.847205 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 12:58:05 crc kubenswrapper[4921]: I1210 12:58:05.847231 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 12:58:05 crc kubenswrapper[4921]: I1210 12:58:05.847250 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:05Z","lastTransitionTime":"2025-12-10T12:58:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 12:58:05 crc kubenswrapper[4921]: I1210 12:58:05.951008 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 12:58:05 crc kubenswrapper[4921]: I1210 12:58:05.951067 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 12:58:05 crc kubenswrapper[4921]: I1210 12:58:05.951086 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 12:58:05 crc kubenswrapper[4921]: I1210 12:58:05.951121 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 12:58:05 crc kubenswrapper[4921]: I1210 12:58:05.951150 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:05Z","lastTransitionTime":"2025-12-10T12:58:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 12:58:06 crc kubenswrapper[4921]: I1210 12:58:06.057013 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 12:58:06 crc kubenswrapper[4921]: I1210 12:58:06.057103 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 12:58:06 crc kubenswrapper[4921]: I1210 12:58:06.057132 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 12:58:06 crc kubenswrapper[4921]: I1210 12:58:06.057184 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
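Each setters.go:603 entry above serializes the same Ready condition object into the node status. A small sketch of that JSON shape follows, using a local struct for illustration; the real kubelet uses the NodeCondition type from k8s.io/api, so the field set here is an assumption matched to what the log prints.

// conditionshape: emit a Ready=False node condition like the entries above.
package main

import (
        "encoding/json"
        "fmt"
        "time"
)

type nodeCondition struct {
        Type               string `json:"type"`
        Status             string `json:"status"`
        LastHeartbeatTime  string `json:"lastHeartbeatTime"`
        LastTransitionTime string `json:"lastTransitionTime"`
        Reason             string `json:"reason"`
        Message            string `json:"message"`
}

func main() {
        now := time.Now().UTC().Format(time.RFC3339)
        c := nodeCondition{
                Type:               "Ready",
                Status:             "False",
                LastHeartbeatTime:  now,
                LastTransitionTime: now,
                Reason:             "KubeletNotReady",
                Message:            "container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady",
        }
        b, err := json.Marshal(c)
        if err != nil {
                fmt.Println("marshal:", err)
                return
        }
        fmt.Println(string(b))
}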
Dec 10 12:58:06 crc kubenswrapper[4921]: I1210 12:58:06.057211 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:06Z","lastTransitionTime":"2025-12-10T12:58:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 12:58:06 crc kubenswrapper[4921]: I1210 12:58:06.108580 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 10 12:58:06 crc kubenswrapper[4921]: I1210 12:58:06.108771 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 10 12:58:06 crc kubenswrapper[4921]: I1210 12:58:06.108905 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 10 12:58:06 crc kubenswrapper[4921]: E1210 12:58:06.109116 4921 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered
Dec 10 12:58:06 crc kubenswrapper[4921]: E1210 12:58:06.109207 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-10 12:59:10.109181294 +0000 UTC m=+147.325403258 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
Dec 10 12:58:06 crc kubenswrapper[4921]: E1210 12:58:06.109702 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 12:59:10.109648137 +0000 UTC m=+147.325870101 (durationBeforeRetry 1m4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 12:58:06 crc kubenswrapper[4921]: E1210 12:58:06.109761 4921 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
Dec 10 12:58:06 crc kubenswrapper[4921]: E1210 12:58:06.109911 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-10 12:59:10.109879253 +0000 UTC m=+147.326101217 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Dec 10 12:58:06 crc kubenswrapper[4921]: I1210 12:58:06.161296 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 12:58:06 crc kubenswrapper[4921]: I1210 12:58:06.161379 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 12:58:06 crc kubenswrapper[4921]: I1210 12:58:06.161436 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 12:58:06 crc kubenswrapper[4921]: I1210 12:58:06.161467 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
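The nestedpendingoperations entries above push each failed mount/unmount a further 1m4s out, which is consistent with exponential backoff on repeated failures (roughly a doubling delay toward a cap; the base, cap, and attempt count below are illustrative assumptions, not constants read from this log). A generic Go sketch of the pattern:

// backoff: retry a failing operation with doubling delay, as the
// "No retries permitted until ... (durationBeforeRetry 1m4s)" lines suggest.
package main

import (
        "errors"
        "fmt"
        "time"
)

func retryWithBackoff(op func() error, base, max time.Duration, attempts int) error {
        delay := base
        var err error
        for i := 0; i < attempts; i++ {
                if err = op(); err == nil {
                        return nil
                }
                fmt.Printf("attempt %d failed; no retries permitted for %v\n", i+1, delay)
                time.Sleep(delay)
                delay *= 2 // double the wait after every failure
                if delay > max {
                        delay = max // never wait longer than the cap
                }
        }
        return err
}

func main() {
        err := retryWithBackoff(
                func() error { return errors.New("MountVolume.SetUp failed") },
                500*time.Millisecond, // assumed base delay
                64*time.Second,       // assumed cap, matching the 1m4s seen above
                3,
        )
        fmt.Println("final result:", err)
}

With a 500ms base, seven consecutive failures would put the next retry 64s out, i.e. the 1m4s the log reports; that arithmetic is the point of the sketch, not a claim about kubelet's exact constants.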
Dec 10 12:58:06 crc kubenswrapper[4921]: I1210 12:58:06.161489 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:06Z","lastTransitionTime":"2025-12-10T12:58:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 12:58:06 crc kubenswrapper[4921]: I1210 12:58:06.165486 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 12:58:06 crc kubenswrapper[4921]: I1210 12:58:06.165551 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 12:58:06 crc kubenswrapper[4921]: I1210 12:58:06.165570 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 12:58:06 crc kubenswrapper[4921]: I1210 12:58:06.165595 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 12:58:06 crc kubenswrapper[4921]: I1210 12:58:06.165615 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:06Z","lastTransitionTime":"2025-12-10T12:58:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 12:58:06 crc kubenswrapper[4921]: E1210 12:58:06.189166 4921 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:58:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:58:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:58:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:58:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:58:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:58:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:58:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:58:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"aa6d129a-c0be-471d-913f-2184d68fb040\\\",\\\"systemUUID\\\":\\\"539c9d38-f260-4af7-b6c3-f4170bf93c3e\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:58:06Z is after 
2025-08-24T17:21:41Z"
Dec 10 12:58:06 crc kubenswrapper[4921]: I1210 12:58:06.192099 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 10 12:58:06 crc kubenswrapper[4921]: I1210 12:58:06.192462 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 10 12:58:06 crc kubenswrapper[4921]: I1210 12:58:06.192601 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j2nnf"
Dec 10 12:58:06 crc kubenswrapper[4921]: I1210 12:58:06.192850 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 10 12:58:06 crc kubenswrapper[4921]: E1210 12:58:06.192816 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 10 12:58:06 crc kubenswrapper[4921]: E1210 12:58:06.193021 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 10 12:58:06 crc kubenswrapper[4921]: E1210 12:58:06.193200 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
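The node status patches above are rejected because the node.network-node-identity.openshift.io webhook serves a certificate that expired on 2025-08-24 while the node clock reads 2025-12-10; the x509 error is the standard validity-window check. A minimal Go sketch of that check follows (webhook-cert.pem is a hypothetical path, not one taken from this log):

// certcheck: reproduce the NotBefore/NotAfter test behind the
// "certificate has expired or is not yet valid" failure above.
package main

import (
        "crypto/x509"
        "encoding/pem"
        "fmt"
        "os"
        "time"
)

func main() {
        pemBytes, err := os.ReadFile("webhook-cert.pem") // hypothetical path
        if err != nil {
                fmt.Println("read cert:", err)
                return
        }
        block, _ := pem.Decode(pemBytes)
        if block == nil {
                fmt.Println("no PEM block found")
                return
        }
        cert, err := x509.ParseCertificate(block.Bytes)
        if err != nil {
                fmt.Println("parse cert:", err)
                return
        }
        now := time.Now()
        switch {
        case now.After(cert.NotAfter):
                fmt.Printf("certificate has expired: current time %s is after %s\n",
                        now.UTC().Format(time.RFC3339), cert.NotAfter.UTC().Format(time.RFC3339))
        case now.Before(cert.NotBefore):
                fmt.Println("certificate is not yet valid")
        default:
                fmt.Println("certificate is within its validity window")
        }
}

Until that serving certificate is rotated, every status patch would be expected to fail the same way, which is why the kubelet keeps logging "Error updating node status, will retry".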
Dec 10 12:58:06 crc kubenswrapper[4921]: E1210 12:58:06.193449 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j2nnf" podUID="9cc656f0-ce36-474b-9fa3-1ce9f43675a4"
Dec 10 12:58:06 crc kubenswrapper[4921]: I1210 12:58:06.196919 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 12:58:06 crc kubenswrapper[4921]: I1210 12:58:06.197213 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 12:58:06 crc kubenswrapper[4921]: I1210 12:58:06.197462 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 12:58:06 crc kubenswrapper[4921]: I1210 12:58:06.197710 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 12:58:06 crc kubenswrapper[4921]: I1210 12:58:06.197931 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:06Z","lastTransitionTime":"2025-12-10T12:58:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 12:58:06 crc kubenswrapper[4921]: I1210 12:58:06.210374 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 10 12:58:06 crc kubenswrapper[4921]: I1210 12:58:06.210514 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 10 12:58:06 crc kubenswrapper[4921]: E1210 12:58:06.210717 4921 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Dec 10 12:58:06 crc kubenswrapper[4921]: E1210 12:58:06.210746 4921 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Dec 10 12:58:06 crc kubenswrapper[4921]: E1210 12:58:06.210766 4921 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 10 12:58:06 crc kubenswrapper[4921]: E1210 12:58:06.210820 4921 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Dec 10 12:58:06 crc kubenswrapper[4921]: E1210 12:58:06.210856 4921 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Dec 10 12:58:06 crc kubenswrapper[4921]: E1210 12:58:06.210863 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-10 12:59:10.210831869 +0000 UTC m=+147.427053823 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 10 12:58:06 crc kubenswrapper[4921]: E1210 12:58:06.210875 4921 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 10 12:58:06 crc kubenswrapper[4921]: E1210 12:58:06.210934 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-10 12:59:10.210914281 +0000 UTC m=+147.427136235 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 10 12:58:06 crc kubenswrapper[4921]: E1210 12:58:06.224512 4921 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:58:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:58:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:58:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:58:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:58:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:58:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:58:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:58:06Z\\\",\\\"message\\\":\\\"container runtime network not
ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/opens
hift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10f
dee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"aa6d129a-c0be-471d-913f-2184d68fb040\\\",\\\"systemUUID\\\":\\\"539c9d38-f260-4af7-b6c3-f4170bf93c3e\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:58:06Z is after 2025-08-24T17:21:41Z" Dec 10 12:58:06 crc kubenswrapper[4921]: I1210 12:58:06.230294 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:58:06 crc kubenswrapper[4921]: I1210 12:58:06.230354 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:58:06 crc kubenswrapper[4921]: I1210 12:58:06.230373 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:58:06 crc kubenswrapper[4921]: I1210 12:58:06.230442 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:58:06 crc kubenswrapper[4921]: I1210 12:58:06.230465 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:06Z","lastTransitionTime":"2025-12-10T12:58:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:58:06 crc kubenswrapper[4921]: E1210 12:58:06.249590 4921 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:58:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:58:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:58:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:58:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:58:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:58:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:58:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:58:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"aa6d129a-c0be-471d-913f-2184d68fb040\\\",\\\"systemUUID\\\":\\\"539c9d38-f260-4af7-b6c3-f4170bf93c3e\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:58:06Z is after 
2025-08-24T17:21:41Z" Dec 10 12:58:06 crc kubenswrapper[4921]: I1210 12:58:06.256255 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:58:06 crc kubenswrapper[4921]: I1210 12:58:06.256434 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:58:06 crc kubenswrapper[4921]: I1210 12:58:06.256460 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:58:06 crc kubenswrapper[4921]: I1210 12:58:06.256490 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:58:06 crc kubenswrapper[4921]: I1210 12:58:06.256512 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:06Z","lastTransitionTime":"2025-12-10T12:58:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:58:06 crc kubenswrapper[4921]: E1210 12:58:06.275835 4921 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:58:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:58:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:58:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:58:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:58:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:58:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:58:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:58:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"aa6d129a-c0be-471d-913f-2184d68fb040\\\",\\\"systemUUID\\\":\\\"539c9d38-f260-4af7-b6c3-f4170bf93c3e\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:58:06Z is after 
2025-08-24T17:21:41Z" Dec 10 12:58:06 crc kubenswrapper[4921]: I1210 12:58:06.281009 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:58:06 crc kubenswrapper[4921]: I1210 12:58:06.281071 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:58:06 crc kubenswrapper[4921]: I1210 12:58:06.281089 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:58:06 crc kubenswrapper[4921]: I1210 12:58:06.281122 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:58:06 crc kubenswrapper[4921]: I1210 12:58:06.281142 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:06Z","lastTransitionTime":"2025-12-10T12:58:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:58:06 crc kubenswrapper[4921]: E1210 12:58:06.302067 4921 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:58:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:58:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:58:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:58:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:58:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:58:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T12:58:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T12:58:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"aa6d129a-c0be-471d-913f-2184d68fb040\\\",\\\"systemUUID\\\":\\\"539c9d38-f260-4af7-b6c3-f4170bf93c3e\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T12:58:06Z is after 
2025-08-24T17:21:41Z" Dec 10 12:58:06 crc kubenswrapper[4921]: E1210 12:58:06.302355 4921 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 10 12:58:06 crc kubenswrapper[4921]: I1210 12:58:06.305097 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:58:06 crc kubenswrapper[4921]: I1210 12:58:06.305142 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:58:06 crc kubenswrapper[4921]: I1210 12:58:06.305157 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:58:06 crc kubenswrapper[4921]: I1210 12:58:06.305180 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:58:06 crc kubenswrapper[4921]: I1210 12:58:06.305194 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:06Z","lastTransitionTime":"2025-12-10T12:58:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:58:06 crc kubenswrapper[4921]: I1210 12:58:06.408796 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:58:06 crc kubenswrapper[4921]: I1210 12:58:06.408880 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:58:06 crc kubenswrapper[4921]: I1210 12:58:06.408907 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:58:06 crc kubenswrapper[4921]: I1210 12:58:06.408940 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:58:06 crc kubenswrapper[4921]: I1210 12:58:06.408961 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:06Z","lastTransitionTime":"2025-12-10T12:58:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:58:06 crc kubenswrapper[4921]: I1210 12:58:06.513125 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:58:06 crc kubenswrapper[4921]: I1210 12:58:06.513177 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:58:06 crc kubenswrapper[4921]: I1210 12:58:06.513191 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:58:06 crc kubenswrapper[4921]: I1210 12:58:06.513234 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:58:06 crc kubenswrapper[4921]: I1210 12:58:06.513251 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:06Z","lastTransitionTime":"2025-12-10T12:58:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:58:06 crc kubenswrapper[4921]: I1210 12:58:06.617079 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:58:06 crc kubenswrapper[4921]: I1210 12:58:06.617150 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:58:06 crc kubenswrapper[4921]: I1210 12:58:06.617168 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:58:06 crc kubenswrapper[4921]: I1210 12:58:06.617196 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:58:06 crc kubenswrapper[4921]: I1210 12:58:06.617218 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:06Z","lastTransitionTime":"2025-12-10T12:58:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:58:06 crc kubenswrapper[4921]: I1210 12:58:06.719414 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:58:06 crc kubenswrapper[4921]: I1210 12:58:06.719457 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:58:06 crc kubenswrapper[4921]: I1210 12:58:06.719468 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:58:06 crc kubenswrapper[4921]: I1210 12:58:06.719495 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:58:06 crc kubenswrapper[4921]: I1210 12:58:06.719507 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:06Z","lastTransitionTime":"2025-12-10T12:58:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:58:06 crc kubenswrapper[4921]: I1210 12:58:06.821600 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:58:06 crc kubenswrapper[4921]: I1210 12:58:06.821665 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:58:06 crc kubenswrapper[4921]: I1210 12:58:06.821688 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:58:06 crc kubenswrapper[4921]: I1210 12:58:06.821718 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:58:06 crc kubenswrapper[4921]: I1210 12:58:06.821742 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:06Z","lastTransitionTime":"2025-12-10T12:58:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:58:06 crc kubenswrapper[4921]: I1210 12:58:06.925441 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:58:06 crc kubenswrapper[4921]: I1210 12:58:06.925509 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:58:06 crc kubenswrapper[4921]: I1210 12:58:06.925526 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:58:06 crc kubenswrapper[4921]: I1210 12:58:06.925553 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:58:06 crc kubenswrapper[4921]: I1210 12:58:06.925571 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:06Z","lastTransitionTime":"2025-12-10T12:58:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:58:07 crc kubenswrapper[4921]: I1210 12:58:07.030051 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:58:07 crc kubenswrapper[4921]: I1210 12:58:07.030201 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:58:07 crc kubenswrapper[4921]: I1210 12:58:07.030227 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:58:07 crc kubenswrapper[4921]: I1210 12:58:07.030253 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:58:07 crc kubenswrapper[4921]: I1210 12:58:07.030272 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:07Z","lastTransitionTime":"2025-12-10T12:58:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:58:07 crc kubenswrapper[4921]: I1210 12:58:07.136194 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:58:07 crc kubenswrapper[4921]: I1210 12:58:07.136261 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:58:07 crc kubenswrapper[4921]: I1210 12:58:07.136279 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:58:07 crc kubenswrapper[4921]: I1210 12:58:07.136308 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:58:07 crc kubenswrapper[4921]: I1210 12:58:07.136326 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:07Z","lastTransitionTime":"2025-12-10T12:58:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:58:07 crc kubenswrapper[4921]: I1210 12:58:07.243618 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:58:07 crc kubenswrapper[4921]: I1210 12:58:07.243668 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:58:07 crc kubenswrapper[4921]: I1210 12:58:07.243680 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:58:07 crc kubenswrapper[4921]: I1210 12:58:07.243699 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:58:07 crc kubenswrapper[4921]: I1210 12:58:07.243711 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:07Z","lastTransitionTime":"2025-12-10T12:58:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:58:07 crc kubenswrapper[4921]: I1210 12:58:07.347208 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:58:07 crc kubenswrapper[4921]: I1210 12:58:07.347246 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:58:07 crc kubenswrapper[4921]: I1210 12:58:07.347256 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:58:07 crc kubenswrapper[4921]: I1210 12:58:07.347272 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:58:07 crc kubenswrapper[4921]: I1210 12:58:07.347285 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:07Z","lastTransitionTime":"2025-12-10T12:58:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Dec 10 12:58:07 crc kubenswrapper[4921]: I1210 12:58:07.450187 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 12:58:07 crc kubenswrapper[4921]: I1210 12:58:07.450243 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 12:58:07 crc kubenswrapper[4921]: I1210 12:58:07.450260 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 12:58:07 crc kubenswrapper[4921]: I1210 12:58:07.450281 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 12:58:07 crc kubenswrapper[4921]: I1210 12:58:07.450296 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:07Z","lastTransitionTime":"2025-12-10T12:58:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 12:58:07 crc kubenswrapper[4921]: I1210 12:58:07.553819 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 12:58:07 crc kubenswrapper[4921]: I1210 12:58:07.553906 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 12:58:07 crc kubenswrapper[4921]: I1210 12:58:07.553931 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 12:58:07 crc kubenswrapper[4921]: I1210 12:58:07.553962 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 12:58:07 crc kubenswrapper[4921]: I1210 12:58:07.553986 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:07Z","lastTransitionTime":"2025-12-10T12:58:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 12:58:07 crc kubenswrapper[4921]: I1210 12:58:07.659682 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 12:58:07 crc kubenswrapper[4921]: I1210 12:58:07.659742 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 12:58:07 crc kubenswrapper[4921]: I1210 12:58:07.659764 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 12:58:07 crc kubenswrapper[4921]: I1210 12:58:07.659862 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 12:58:07 crc kubenswrapper[4921]: I1210 12:58:07.659887 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:07Z","lastTransitionTime":"2025-12-10T12:58:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 12:58:07 crc kubenswrapper[4921]: I1210 12:58:07.764348 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 12:58:07 crc kubenswrapper[4921]: I1210 12:58:07.764489 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 12:58:07 crc kubenswrapper[4921]: I1210 12:58:07.764514 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 12:58:07 crc kubenswrapper[4921]: I1210 12:58:07.765017 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 12:58:07 crc kubenswrapper[4921]: I1210 12:58:07.765309 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:07Z","lastTransitionTime":"2025-12-10T12:58:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 12:58:07 crc kubenswrapper[4921]: I1210 12:58:07.869001 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 12:58:07 crc kubenswrapper[4921]: I1210 12:58:07.869075 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 12:58:07 crc kubenswrapper[4921]: I1210 12:58:07.869097 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 12:58:07 crc kubenswrapper[4921]: I1210 12:58:07.869125 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 12:58:07 crc kubenswrapper[4921]: I1210 12:58:07.869146 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:07Z","lastTransitionTime":"2025-12-10T12:58:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 12:58:07 crc kubenswrapper[4921]: I1210 12:58:07.972709 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 12:58:07 crc kubenswrapper[4921]: I1210 12:58:07.972783 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 12:58:07 crc kubenswrapper[4921]: I1210 12:58:07.972806 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 12:58:07 crc kubenswrapper[4921]: I1210 12:58:07.972837 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 12:58:07 crc kubenswrapper[4921]: I1210 12:58:07.972862 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:07Z","lastTransitionTime":"2025-12-10T12:58:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 12:58:08 crc kubenswrapper[4921]: I1210 12:58:08.076930 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 12:58:08 crc kubenswrapper[4921]: I1210 12:58:08.077368 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 12:58:08 crc kubenswrapper[4921]: I1210 12:58:08.077508 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 12:58:08 crc kubenswrapper[4921]: I1210 12:58:08.077552 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 12:58:08 crc kubenswrapper[4921]: I1210 12:58:08.077577 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:08Z","lastTransitionTime":"2025-12-10T12:58:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 12:58:08 crc kubenswrapper[4921]: I1210 12:58:08.181525 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 12:58:08 crc kubenswrapper[4921]: I1210 12:58:08.181599 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 12:58:08 crc kubenswrapper[4921]: I1210 12:58:08.181620 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 12:58:08 crc kubenswrapper[4921]: I1210 12:58:08.181647 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 12:58:08 crc kubenswrapper[4921]: I1210 12:58:08.181668 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:08Z","lastTransitionTime":"2025-12-10T12:58:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 12:58:08 crc kubenswrapper[4921]: I1210 12:58:08.192126 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 10 12:58:08 crc kubenswrapper[4921]: E1210 12:58:08.192537 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 10 12:58:08 crc kubenswrapper[4921]: I1210 12:58:08.192608 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 10 12:58:08 crc kubenswrapper[4921]: E1210 12:58:08.193127 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 10 12:58:08 crc kubenswrapper[4921]: I1210 12:58:08.192698 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 10 12:58:08 crc kubenswrapper[4921]: E1210 12:58:08.193701 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 10 12:58:08 crc kubenswrapper[4921]: I1210 12:58:08.192674 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j2nnf"
Dec 10 12:58:08 crc kubenswrapper[4921]: E1210 12:58:08.194231 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j2nnf" podUID="9cc656f0-ce36-474b-9fa3-1ce9f43675a4"
Dec 10 12:58:08 crc kubenswrapper[4921]: I1210 12:58:08.285313 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 12:58:08 crc kubenswrapper[4921]: I1210 12:58:08.285381 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 12:58:08 crc kubenswrapper[4921]: I1210 12:58:08.285443 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 12:58:08 crc kubenswrapper[4921]: I1210 12:58:08.285469 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 12:58:08 crc kubenswrapper[4921]: I1210 12:58:08.285486 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:08Z","lastTransitionTime":"2025-12-10T12:58:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
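The paired util.go/pod_workers.go entries above show which workloads are actually blocked: any pod that needs a pod-network sandbox is skipped with "network is not ready" until the CNI config appears, while host-network pods continue to run. A small hedged sketch (the regex is tailored to the pod="..." podUID="..." fields visible above; this is a log-triage helper, not an official tool) that lists the distinct blocked pods from a kubelet log fed on stdin, e.g. go run skipped.go < kubelet.log:

    // skipped.go - sketch: list distinct pods reported as
    // "Error syncing pod, skipping" in a kubelet log read from stdin.
    package main

    import (
        "bufio"
        "fmt"
        "os"
        "regexp"
    )

    func main() {
        re := regexp.MustCompile(`Error syncing pod, skipping.*pod="([^"]+)" podUID="([^"]+)"`)
        seen := map[string]string{} // pod name -> podUID
        sc := bufio.NewScanner(os.Stdin)
        sc.Buffer(make([]byte, 0, 1024*1024), 1024*1024) // kubelet lines can be long
        for sc.Scan() {
            if m := re.FindStringSubmatch(sc.Text()); m != nil {
                seen[m[1]] = m[2]
            }
        }
        for pod, uid := range seen {
            fmt.Printf("%s (uid %s)\n", pod, uid)
        }
    }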
Dec 10 12:58:08 crc kubenswrapper[4921]: I1210 12:58:08.388966 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 12:58:08 crc kubenswrapper[4921]: I1210 12:58:08.389034 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 12:58:08 crc kubenswrapper[4921]: I1210 12:58:08.389056 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 12:58:08 crc kubenswrapper[4921]: I1210 12:58:08.389085 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 12:58:08 crc kubenswrapper[4921]: I1210 12:58:08.389109 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:08Z","lastTransitionTime":"2025-12-10T12:58:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 12:58:08 crc kubenswrapper[4921]: I1210 12:58:08.493184 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 12:58:08 crc kubenswrapper[4921]: I1210 12:58:08.493786 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 12:58:08 crc kubenswrapper[4921]: I1210 12:58:08.493938 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 12:58:08 crc kubenswrapper[4921]: I1210 12:58:08.494130 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 12:58:08 crc kubenswrapper[4921]: I1210 12:58:08.494335 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:08Z","lastTransitionTime":"2025-12-10T12:58:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 12:58:08 crc kubenswrapper[4921]: I1210 12:58:08.596664 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 12:58:08 crc kubenswrapper[4921]: I1210 12:58:08.596713 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 12:58:08 crc kubenswrapper[4921]: I1210 12:58:08.596731 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 12:58:08 crc kubenswrapper[4921]: I1210 12:58:08.596754 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 12:58:08 crc kubenswrapper[4921]: I1210 12:58:08.596771 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:08Z","lastTransitionTime":"2025-12-10T12:58:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 12:58:08 crc kubenswrapper[4921]: I1210 12:58:08.700945 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 12:58:08 crc kubenswrapper[4921]: I1210 12:58:08.700985 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 12:58:08 crc kubenswrapper[4921]: I1210 12:58:08.700994 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 12:58:08 crc kubenswrapper[4921]: I1210 12:58:08.701012 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 12:58:08 crc kubenswrapper[4921]: I1210 12:58:08.701023 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:08Z","lastTransitionTime":"2025-12-10T12:58:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 12:58:08 crc kubenswrapper[4921]: I1210 12:58:08.804430 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 12:58:08 crc kubenswrapper[4921]: I1210 12:58:08.804868 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 12:58:08 crc kubenswrapper[4921]: I1210 12:58:08.805124 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 12:58:08 crc kubenswrapper[4921]: I1210 12:58:08.805335 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 12:58:08 crc kubenswrapper[4921]: I1210 12:58:08.805569 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:08Z","lastTransitionTime":"2025-12-10T12:58:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 12:58:08 crc kubenswrapper[4921]: I1210 12:58:08.909362 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 12:58:08 crc kubenswrapper[4921]: I1210 12:58:08.909429 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 12:58:08 crc kubenswrapper[4921]: I1210 12:58:08.909441 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 12:58:08 crc kubenswrapper[4921]: I1210 12:58:08.909459 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 12:58:08 crc kubenswrapper[4921]: I1210 12:58:08.909485 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:08Z","lastTransitionTime":"2025-12-10T12:58:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 12:58:09 crc kubenswrapper[4921]: I1210 12:58:09.012562 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 12:58:09 crc kubenswrapper[4921]: I1210 12:58:09.012639 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 12:58:09 crc kubenswrapper[4921]: I1210 12:58:09.012661 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 12:58:09 crc kubenswrapper[4921]: I1210 12:58:09.012692 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 12:58:09 crc kubenswrapper[4921]: I1210 12:58:09.012720 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:09Z","lastTransitionTime":"2025-12-10T12:58:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 12:58:09 crc kubenswrapper[4921]: I1210 12:58:09.115582 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 12:58:09 crc kubenswrapper[4921]: I1210 12:58:09.115716 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 12:58:09 crc kubenswrapper[4921]: I1210 12:58:09.115739 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 12:58:09 crc kubenswrapper[4921]: I1210 12:58:09.115774 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 12:58:09 crc kubenswrapper[4921]: I1210 12:58:09.115828 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:09Z","lastTransitionTime":"2025-12-10T12:58:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 12:58:09 crc kubenswrapper[4921]: I1210 12:58:09.193462 4921 scope.go:117] "RemoveContainer" containerID="551dc5f4c39c06d2143805320061efc95d84e870eeecf23b3a64d829653810ed"
Dec 10 12:58:09 crc kubenswrapper[4921]: I1210 12:58:09.219080 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 12:58:09 crc kubenswrapper[4921]: I1210 12:58:09.219152 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 12:58:09 crc kubenswrapper[4921]: I1210 12:58:09.219171 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 12:58:09 crc kubenswrapper[4921]: I1210 12:58:09.219201 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 12:58:09 crc kubenswrapper[4921]: I1210 12:58:09.219222 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:09Z","lastTransitionTime":"2025-12-10T12:58:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 12:58:09 crc kubenswrapper[4921]: I1210 12:58:09.323195 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 12:58:09 crc kubenswrapper[4921]: I1210 12:58:09.323260 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 12:58:09 crc kubenswrapper[4921]: I1210 12:58:09.323280 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 12:58:09 crc kubenswrapper[4921]: I1210 12:58:09.323309 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 12:58:09 crc kubenswrapper[4921]: I1210 12:58:09.323328 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:09Z","lastTransitionTime":"2025-12-10T12:58:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 12:58:09 crc kubenswrapper[4921]: I1210 12:58:09.427555 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 12:58:09 crc kubenswrapper[4921]: I1210 12:58:09.427664 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 12:58:09 crc kubenswrapper[4921]: I1210 12:58:09.427684 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 12:58:09 crc kubenswrapper[4921]: I1210 12:58:09.427742 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 12:58:09 crc kubenswrapper[4921]: I1210 12:58:09.427764 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:09Z","lastTransitionTime":"2025-12-10T12:58:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 12:58:09 crc kubenswrapper[4921]: I1210 12:58:09.531207 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 12:58:09 crc kubenswrapper[4921]: I1210 12:58:09.531265 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 12:58:09 crc kubenswrapper[4921]: I1210 12:58:09.531283 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 12:58:09 crc kubenswrapper[4921]: I1210 12:58:09.531307 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 12:58:09 crc kubenswrapper[4921]: I1210 12:58:09.531337 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:09Z","lastTransitionTime":"2025-12-10T12:58:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 12:58:09 crc kubenswrapper[4921]: I1210 12:58:09.634804 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 12:58:09 crc kubenswrapper[4921]: I1210 12:58:09.634870 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 12:58:09 crc kubenswrapper[4921]: I1210 12:58:09.634890 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 12:58:09 crc kubenswrapper[4921]: I1210 12:58:09.634914 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 12:58:09 crc kubenswrapper[4921]: I1210 12:58:09.634933 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:09Z","lastTransitionTime":"2025-12-10T12:58:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 12:58:09 crc kubenswrapper[4921]: I1210 12:58:09.738143 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 12:58:09 crc kubenswrapper[4921]: I1210 12:58:09.738195 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 12:58:09 crc kubenswrapper[4921]: I1210 12:58:09.738211 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 12:58:09 crc kubenswrapper[4921]: I1210 12:58:09.738235 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 12:58:09 crc kubenswrapper[4921]: I1210 12:58:09.738252 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:09Z","lastTransitionTime":"2025-12-10T12:58:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 12:58:09 crc kubenswrapper[4921]: I1210 12:58:09.842556 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 12:58:09 crc kubenswrapper[4921]: I1210 12:58:09.842635 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 12:58:09 crc kubenswrapper[4921]: I1210 12:58:09.842702 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 12:58:09 crc kubenswrapper[4921]: I1210 12:58:09.842735 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 12:58:09 crc kubenswrapper[4921]: I1210 12:58:09.842754 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:09Z","lastTransitionTime":"2025-12-10T12:58:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 12:58:09 crc kubenswrapper[4921]: I1210 12:58:09.947037 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 12:58:09 crc kubenswrapper[4921]: I1210 12:58:09.947114 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 12:58:09 crc kubenswrapper[4921]: I1210 12:58:09.947133 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 12:58:09 crc kubenswrapper[4921]: I1210 12:58:09.947157 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 12:58:09 crc kubenswrapper[4921]: I1210 12:58:09.947174 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:09Z","lastTransitionTime":"2025-12-10T12:58:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 12:58:10 crc kubenswrapper[4921]: I1210 12:58:10.052216 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 12:58:10 crc kubenswrapper[4921]: I1210 12:58:10.052276 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 12:58:10 crc kubenswrapper[4921]: I1210 12:58:10.052299 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 12:58:10 crc kubenswrapper[4921]: I1210 12:58:10.052326 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 12:58:10 crc kubenswrapper[4921]: I1210 12:58:10.052343 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:10Z","lastTransitionTime":"2025-12-10T12:58:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 12:58:10 crc kubenswrapper[4921]: I1210 12:58:10.156363 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 12:58:10 crc kubenswrapper[4921]: I1210 12:58:10.156471 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 12:58:10 crc kubenswrapper[4921]: I1210 12:58:10.156495 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 12:58:10 crc kubenswrapper[4921]: I1210 12:58:10.156526 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 12:58:10 crc kubenswrapper[4921]: I1210 12:58:10.156549 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:10Z","lastTransitionTime":"2025-12-10T12:58:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 12:58:10 crc kubenswrapper[4921]: I1210 12:58:10.193276 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 10 12:58:10 crc kubenswrapper[4921]: E1210 12:58:10.193466 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 10 12:58:10 crc kubenswrapper[4921]: I1210 12:58:10.193609 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j2nnf"
Dec 10 12:58:10 crc kubenswrapper[4921]: E1210 12:58:10.193721 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j2nnf" podUID="9cc656f0-ce36-474b-9fa3-1ce9f43675a4"
Dec 10 12:58:10 crc kubenswrapper[4921]: I1210 12:58:10.193818 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 10 12:58:10 crc kubenswrapper[4921]: E1210 12:58:10.193984 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 10 12:58:10 crc kubenswrapper[4921]: I1210 12:58:10.194057 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 10 12:58:10 crc kubenswrapper[4921]: E1210 12:58:10.194137 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 10 12:58:10 crc kubenswrapper[4921]: I1210 12:58:10.259240 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 12:58:10 crc kubenswrapper[4921]: I1210 12:58:10.259311 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 12:58:10 crc kubenswrapper[4921]: I1210 12:58:10.259331 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 12:58:10 crc kubenswrapper[4921]: I1210 12:58:10.259357 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 12:58:10 crc kubenswrapper[4921]: I1210 12:58:10.259375 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:10Z","lastTransitionTime":"2025-12-10T12:58:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 12:58:10 crc kubenswrapper[4921]: I1210 12:58:10.362098 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 12:58:10 crc kubenswrapper[4921]: I1210 12:58:10.362145 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 12:58:10 crc kubenswrapper[4921]: I1210 12:58:10.362170 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 12:58:10 crc kubenswrapper[4921]: I1210 12:58:10.362188 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 12:58:10 crc kubenswrapper[4921]: I1210 12:58:10.362200 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:10Z","lastTransitionTime":"2025-12-10T12:58:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 12:58:10 crc kubenswrapper[4921]: I1210 12:58:10.464874 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 12:58:10 crc kubenswrapper[4921]: I1210 12:58:10.464933 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 12:58:10 crc kubenswrapper[4921]: I1210 12:58:10.464943 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 12:58:10 crc kubenswrapper[4921]: I1210 12:58:10.464960 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 12:58:10 crc kubenswrapper[4921]: I1210 12:58:10.464971 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:10Z","lastTransitionTime":"2025-12-10T12:58:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 12:58:10 crc kubenswrapper[4921]: I1210 12:58:10.567170 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 12:58:10 crc kubenswrapper[4921]: I1210 12:58:10.567207 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 12:58:10 crc kubenswrapper[4921]: I1210 12:58:10.567217 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 12:58:10 crc kubenswrapper[4921]: I1210 12:58:10.567232 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 12:58:10 crc kubenswrapper[4921]: I1210 12:58:10.567243 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:10Z","lastTransitionTime":"2025-12-10T12:58:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 12:58:10 crc kubenswrapper[4921]: I1210 12:58:10.670963 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 12:58:10 crc kubenswrapper[4921]: I1210 12:58:10.671001 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 12:58:10 crc kubenswrapper[4921]: I1210 12:58:10.671014 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 12:58:10 crc kubenswrapper[4921]: I1210 12:58:10.671032 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 12:58:10 crc kubenswrapper[4921]: I1210 12:58:10.671044 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:10Z","lastTransitionTime":"2025-12-10T12:58:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 12:58:10 crc kubenswrapper[4921]: I1210 12:58:10.760988 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-m7n89_50684108-04fc-405c-82be-d21d16cd650b/ovnkube-controller/2.log"
Dec 10 12:58:10 crc kubenswrapper[4921]: I1210 12:58:10.765060 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" event={"ID":"50684108-04fc-405c-82be-d21d16cd650b","Type":"ContainerStarted","Data":"45f1cf4017292b2abcad5c57a340cd9c9b14a2bbfeeba7c10e3d0b3bcc0dca5a"}
Dec 10 12:58:10 crc kubenswrapper[4921]: I1210 12:58:10.766820 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-m7n89"
Dec 10 12:58:10 crc kubenswrapper[4921]: I1210 12:58:10.774557 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 12:58:10 crc kubenswrapper[4921]: I1210 12:58:10.774601 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 12:58:10 crc kubenswrapper[4921]: I1210 12:58:10.774611 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 12:58:10 crc kubenswrapper[4921]: I1210 12:58:10.774629 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 12:58:10 crc kubenswrapper[4921]: I1210 12:58:10.774646 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:10Z","lastTransitionTime":"2025-12-10T12:58:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 12:58:10 crc kubenswrapper[4921]: I1210 12:58:10.796170 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-j2nnf"]
Dec 10 12:58:10 crc kubenswrapper[4921]: I1210 12:58:10.796359 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j2nnf"
Dec 10 12:58:10 crc kubenswrapper[4921]: E1210 12:58:10.796536 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j2nnf" podUID="9cc656f0-ce36-474b-9fa3-1ce9f43675a4"
Dec 10 12:58:10 crc kubenswrapper[4921]: I1210 12:58:10.844553 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" podStartSLOduration=63.844532057 podStartE2EDuration="1m3.844532057s" podCreationTimestamp="2025-12-10 12:57:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 12:58:10.842717567 +0000 UTC m=+88.058939501" watchObservedRunningTime="2025-12-10 12:58:10.844532057 +0000 UTC m=+88.060753981"
Dec 10 12:58:10 crc kubenswrapper[4921]: I1210 12:58:10.844754 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-jskgz" podStartSLOduration=63.844749373 podStartE2EDuration="1m3.844749373s" podCreationTimestamp="2025-12-10 12:57:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 12:58:10.822000091 +0000 UTC m=+88.038222055" watchObservedRunningTime="2025-12-10 12:58:10.844749373 +0000 UTC m=+88.060971297"
Dec 10 12:58:10 crc kubenswrapper[4921]: I1210 12:58:10.868116 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wwrv2" podStartSLOduration=63.868084762 podStartE2EDuration="1m3.868084762s" podCreationTimestamp="2025-12-10 12:57:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 12:58:10.856769057 +0000 UTC m=+88.072991011" watchObservedRunningTime="2025-12-10 12:58:10.868084762 +0000 UTC m=+88.084306726"
Dec 10 12:58:10 crc kubenswrapper[4921]: I1210 12:58:10.877060 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 12:58:10 crc kubenswrapper[4921]: I1210 12:58:10.877117 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 12:58:10 crc kubenswrapper[4921]: I1210 12:58:10.877139 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 12:58:10 crc kubenswrapper[4921]: I1210 12:58:10.877189 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 12:58:10 crc kubenswrapper[4921]: I1210 12:58:10.877206 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:10Z","lastTransitionTime":"2025-12-10T12:58:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
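The PLEG ContainerStarted event and readiness probe for ovnkube-node-m7n89 a few entries back are the turning point: once the OVN-Kubernetes node pod is running, it can write the CNI config and the node's Ready condition can flip to True. A hedged sketch for watching that transition from outside the node, assuming client-go is available and using the node name "crc" from this log:

    // readywait.go - sketch: poll the node's Ready condition until it is True.
    // Assumes KUBECONFIG points at the cluster that produced this log.
    package main

    import (
        "context"
        "fmt"
        "os"
        "time"

        corev1 "k8s.io/api/core/v1"
        metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
        "k8s.io/client-go/kubernetes"
        "k8s.io/client-go/tools/clientcmd"
    )

    func main() {
        cfg, err := clientcmd.BuildConfigFromFlags("", os.Getenv("KUBECONFIG"))
        if err != nil {
            fmt.Fprintln(os.Stderr, err)
            os.Exit(1)
        }
        cs, err := kubernetes.NewForConfig(cfg)
        if err != nil {
            fmt.Fprintln(os.Stderr, err)
            os.Exit(1)
        }
        for {
            node, err := cs.CoreV1().Nodes().Get(context.TODO(), "crc", metav1.GetOptions{})
            if err != nil {
                fmt.Fprintln(os.Stderr, err)
                os.Exit(1)
            }
            for _, c := range node.Status.Conditions {
                if c.Type == corev1.NodeReady {
                    fmt.Printf("%s Ready=%s reason=%s\n", time.Now().Format(time.RFC3339), c.Status, c.Reason)
                    if c.Status == corev1.ConditionTrue {
                        return
                    }
                }
            }
            time.Sleep(2 * time.Second)
        }
    }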
Dec 10 12:58:10 crc kubenswrapper[4921]: I1210 12:58:10.887622 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=35.887596664 podStartE2EDuration="35.887596664s" podCreationTimestamp="2025-12-10 12:57:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 12:58:10.88744166 +0000 UTC m=+88.103663604" watchObservedRunningTime="2025-12-10 12:58:10.887596664 +0000 UTC m=+88.103818588"
Dec 10 12:58:10 crc kubenswrapper[4921]: I1210 12:58:10.947280 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-zmks6" podStartSLOduration=63.947254612 podStartE2EDuration="1m3.947254612s" podCreationTimestamp="2025-12-10 12:57:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 12:58:10.92847599 +0000 UTC m=+88.144697914" watchObservedRunningTime="2025-12-10 12:58:10.947254612 +0000 UTC m=+88.163476536"
Dec 10 12:58:10 crc kubenswrapper[4921]: I1210 12:58:10.947589 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-pqlx4" podStartSLOduration=63.947583331 podStartE2EDuration="1m3.947583331s" podCreationTimestamp="2025-12-10 12:57:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 12:58:10.947028216 +0000 UTC m=+88.163250140" watchObservedRunningTime="2025-12-10 12:58:10.947583331 +0000 UTC m=+88.163805255"
Dec 10 12:58:10 crc kubenswrapper[4921]: I1210 12:58:10.979821 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 12:58:10 crc kubenswrapper[4921]: I1210 12:58:10.980135 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 12:58:10 crc kubenswrapper[4921]: I1210 12:58:10.980243 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 12:58:10 crc kubenswrapper[4921]: I1210 12:58:10.980331 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 12:58:10 crc kubenswrapper[4921]: I1210 12:58:10.980439 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:10Z","lastTransitionTime":"2025-12-10T12:58:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 12:58:10 crc kubenswrapper[4921]: I1210 12:58:10.982103 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-86bpd" podStartSLOduration=63.98208299 podStartE2EDuration="1m3.98208299s" podCreationTimestamp="2025-12-10 12:57:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 12:58:10.981929506 +0000 UTC m=+88.198151440" watchObservedRunningTime="2025-12-10 12:58:10.98208299 +0000 UTC m=+88.198304914"
Dec 10 12:58:11 crc kubenswrapper[4921]: I1210 12:58:11.000819 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=6.00079737 podStartE2EDuration="6.00079737s" podCreationTimestamp="2025-12-10 12:58:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 12:58:11.00078979 +0000 UTC m=+88.217011724" watchObservedRunningTime="2025-12-10 12:58:11.00079737 +0000 UTC m=+88.217019294"
Dec 10 12:58:11 crc kubenswrapper[4921]: I1210 12:58:11.030747 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=70.030725862 podStartE2EDuration="1m10.030725862s" podCreationTimestamp="2025-12-10 12:57:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 12:58:11.028261763 +0000 UTC m=+88.244483697" watchObservedRunningTime="2025-12-10 12:58:11.030725862 +0000 UTC m=+88.246947806"
Dec 10 12:58:11 crc kubenswrapper[4921]: I1210 12:58:11.052087 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=70.052061005 podStartE2EDuration="1m10.052061005s" podCreationTimestamp="2025-12-10 12:57:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 12:58:11.051011586 +0000 UTC m=+88.267233510" watchObservedRunningTime="2025-12-10 12:58:11.052061005 +0000 UTC m=+88.268282929"
Dec 10 12:58:11 crc kubenswrapper[4921]: I1210 12:58:11.083027 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 12:58:11 crc kubenswrapper[4921]: I1210 12:58:11.083309 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 12:58:11 crc kubenswrapper[4921]: I1210 12:58:11.083421 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 12:58:11 crc kubenswrapper[4921]: I1210 12:58:11.083539 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 12:58:11 crc kubenswrapper[4921]: I1210 12:58:11.083633 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:11Z","lastTransitionTime":"2025-12-10T12:58:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
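Each pod_startup_latency_tracker record above carries podStartSLOduration (seconds), podStartE2EDuration (the same span as a Go duration string), the pod's creation timestamp, and the observation times; the m=+88.x suffixes read as seconds since this kubelet process started. The field meanings here are inferred from the values visible in the lines above. A sketch that pulls pod name and end-to-end startup duration out of such records on stdin:

    // startlat.go - sketch: extract pod name and podStartE2EDuration from
    // "Observed pod startup duration" records in a kubelet log on stdin.
    package main

    import (
        "bufio"
        "fmt"
        "os"
        "regexp"
        "time"
    )

    func main() {
        re := regexp.MustCompile(`Observed pod startup duration" pod="([^"]+)" podStartSLOduration=\S+ podStartE2EDuration="([^"]+)"`)
        sc := bufio.NewScanner(os.Stdin)
        sc.Buffer(make([]byte, 0, 1024*1024), 1024*1024)
        for sc.Scan() {
            m := re.FindStringSubmatch(sc.Text())
            if m == nil {
                continue
            }
            d, err := time.ParseDuration(m[2]) // e.g. "1m3.844532057s"
            if err != nil {
                continue
            }
            fmt.Printf("%-70s %v\n", m[1], d)
        }
    }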
Dec 10 12:58:11 crc kubenswrapper[4921]: I1210 12:58:11.084425 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=69.084415224 podStartE2EDuration="1m9.084415224s" podCreationTimestamp="2025-12-10 12:57:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 12:58:11.082659415 +0000 UTC m=+88.298881349" watchObservedRunningTime="2025-12-10 12:58:11.084415224 +0000 UTC m=+88.300637148"
Dec 10 12:58:11 crc kubenswrapper[4921]: I1210 12:58:11.134466 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" podStartSLOduration=64.134444304 podStartE2EDuration="1m4.134444304s" podCreationTimestamp="2025-12-10 12:57:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 12:58:11.133877849 +0000 UTC m=+88.350099773" watchObservedRunningTime="2025-12-10 12:58:11.134444304 +0000 UTC m=+88.350666228"
Dec 10 12:58:11 crc kubenswrapper[4921]: I1210 12:58:11.185829 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 12:58:11 crc kubenswrapper[4921]: I1210 12:58:11.185894 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 12:58:11 crc kubenswrapper[4921]: I1210 12:58:11.185908 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 12:58:11 crc kubenswrapper[4921]: I1210 12:58:11.185930 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 12:58:11 crc kubenswrapper[4921]: I1210 12:58:11.185947 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:11Z","lastTransitionTime":"2025-12-10T12:58:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 12:58:11 crc kubenswrapper[4921]: I1210 12:58:11.288625 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 12:58:11 crc kubenswrapper[4921]: I1210 12:58:11.288668 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 12:58:11 crc kubenswrapper[4921]: I1210 12:58:11.288679 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 12:58:11 crc kubenswrapper[4921]: I1210 12:58:11.288697 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 12:58:11 crc kubenswrapper[4921]: I1210 12:58:11.288709 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:11Z","lastTransitionTime":"2025-12-10T12:58:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Has your network provider started?"} Dec 10 12:58:11 crc kubenswrapper[4921]: I1210 12:58:11.390717 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:58:11 crc kubenswrapper[4921]: I1210 12:58:11.390796 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:58:11 crc kubenswrapper[4921]: I1210 12:58:11.390809 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:58:11 crc kubenswrapper[4921]: I1210 12:58:11.390826 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:58:11 crc kubenswrapper[4921]: I1210 12:58:11.390838 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:11Z","lastTransitionTime":"2025-12-10T12:58:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:58:11 crc kubenswrapper[4921]: I1210 12:58:11.493880 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:58:11 crc kubenswrapper[4921]: I1210 12:58:11.493940 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:58:11 crc kubenswrapper[4921]: I1210 12:58:11.493956 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:58:11 crc kubenswrapper[4921]: I1210 12:58:11.493980 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:58:11 crc kubenswrapper[4921]: I1210 12:58:11.493998 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:11Z","lastTransitionTime":"2025-12-10T12:58:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:58:11 crc kubenswrapper[4921]: I1210 12:58:11.598568 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:58:11 crc kubenswrapper[4921]: I1210 12:58:11.598631 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:58:11 crc kubenswrapper[4921]: I1210 12:58:11.598646 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:58:11 crc kubenswrapper[4921]: I1210 12:58:11.598667 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:58:11 crc kubenswrapper[4921]: I1210 12:58:11.598681 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:11Z","lastTransitionTime":"2025-12-10T12:58:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:58:11 crc kubenswrapper[4921]: I1210 12:58:11.701682 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:58:11 crc kubenswrapper[4921]: I1210 12:58:11.701729 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:58:11 crc kubenswrapper[4921]: I1210 12:58:11.701739 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:58:11 crc kubenswrapper[4921]: I1210 12:58:11.701757 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:58:11 crc kubenswrapper[4921]: I1210 12:58:11.701777 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:11Z","lastTransitionTime":"2025-12-10T12:58:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:58:11 crc kubenswrapper[4921]: I1210 12:58:11.804971 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:58:11 crc kubenswrapper[4921]: I1210 12:58:11.805034 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:58:11 crc kubenswrapper[4921]: I1210 12:58:11.805055 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:58:11 crc kubenswrapper[4921]: I1210 12:58:11.805081 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:58:11 crc kubenswrapper[4921]: I1210 12:58:11.805104 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:11Z","lastTransitionTime":"2025-12-10T12:58:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:58:11 crc kubenswrapper[4921]: I1210 12:58:11.907420 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:58:11 crc kubenswrapper[4921]: I1210 12:58:11.907464 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:58:11 crc kubenswrapper[4921]: I1210 12:58:11.907475 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:58:11 crc kubenswrapper[4921]: I1210 12:58:11.907491 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:58:11 crc kubenswrapper[4921]: I1210 12:58:11.907505 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:11Z","lastTransitionTime":"2025-12-10T12:58:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:58:12 crc kubenswrapper[4921]: I1210 12:58:12.009527 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:58:12 crc kubenswrapper[4921]: I1210 12:58:12.009571 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:58:12 crc kubenswrapper[4921]: I1210 12:58:12.009581 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:58:12 crc kubenswrapper[4921]: I1210 12:58:12.009598 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:58:12 crc kubenswrapper[4921]: I1210 12:58:12.009608 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:12Z","lastTransitionTime":"2025-12-10T12:58:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:58:12 crc kubenswrapper[4921]: I1210 12:58:12.112396 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:58:12 crc kubenswrapper[4921]: I1210 12:58:12.112447 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:58:12 crc kubenswrapper[4921]: I1210 12:58:12.112456 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:58:12 crc kubenswrapper[4921]: I1210 12:58:12.112473 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:58:12 crc kubenswrapper[4921]: I1210 12:58:12.112487 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:12Z","lastTransitionTime":"2025-12-10T12:58:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:58:12 crc kubenswrapper[4921]: I1210 12:58:12.192603 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 12:58:12 crc kubenswrapper[4921]: I1210 12:58:12.192661 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 12:58:12 crc kubenswrapper[4921]: I1210 12:58:12.192661 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j2nnf" Dec 10 12:58:12 crc kubenswrapper[4921]: I1210 12:58:12.192722 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 12:58:12 crc kubenswrapper[4921]: E1210 12:58:12.192777 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 12:58:12 crc kubenswrapper[4921]: E1210 12:58:12.193531 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 12:58:12 crc kubenswrapper[4921]: E1210 12:58:12.193601 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 12:58:12 crc kubenswrapper[4921]: E1210 12:58:12.193687 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j2nnf" podUID="9cc656f0-ce36-474b-9fa3-1ce9f43675a4" Dec 10 12:58:12 crc kubenswrapper[4921]: I1210 12:58:12.215816 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:58:12 crc kubenswrapper[4921]: I1210 12:58:12.215859 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:58:12 crc kubenswrapper[4921]: I1210 12:58:12.215870 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:58:12 crc kubenswrapper[4921]: I1210 12:58:12.215888 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:58:12 crc kubenswrapper[4921]: I1210 12:58:12.215930 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:12Z","lastTransitionTime":"2025-12-10T12:58:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:58:12 crc kubenswrapper[4921]: I1210 12:58:12.318918 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:58:12 crc kubenswrapper[4921]: I1210 12:58:12.319046 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:58:12 crc kubenswrapper[4921]: I1210 12:58:12.319070 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:58:12 crc kubenswrapper[4921]: I1210 12:58:12.319182 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:58:12 crc kubenswrapper[4921]: I1210 12:58:12.319211 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:12Z","lastTransitionTime":"2025-12-10T12:58:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:58:12 crc kubenswrapper[4921]: I1210 12:58:12.423031 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:58:12 crc kubenswrapper[4921]: I1210 12:58:12.423085 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:58:12 crc kubenswrapper[4921]: I1210 12:58:12.423094 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:58:12 crc kubenswrapper[4921]: I1210 12:58:12.423119 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:58:12 crc kubenswrapper[4921]: I1210 12:58:12.423132 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:12Z","lastTransitionTime":"2025-12-10T12:58:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:58:12 crc kubenswrapper[4921]: I1210 12:58:12.526345 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:58:12 crc kubenswrapper[4921]: I1210 12:58:12.526429 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:58:12 crc kubenswrapper[4921]: I1210 12:58:12.526439 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:58:12 crc kubenswrapper[4921]: I1210 12:58:12.526455 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:58:12 crc kubenswrapper[4921]: I1210 12:58:12.526467 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:12Z","lastTransitionTime":"2025-12-10T12:58:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:58:12 crc kubenswrapper[4921]: I1210 12:58:12.629483 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:58:12 crc kubenswrapper[4921]: I1210 12:58:12.629591 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:58:12 crc kubenswrapper[4921]: I1210 12:58:12.629613 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:58:12 crc kubenswrapper[4921]: I1210 12:58:12.629642 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:58:12 crc kubenswrapper[4921]: I1210 12:58:12.629661 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:12Z","lastTransitionTime":"2025-12-10T12:58:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:58:12 crc kubenswrapper[4921]: I1210 12:58:12.732873 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:58:12 crc kubenswrapper[4921]: I1210 12:58:12.732934 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:58:12 crc kubenswrapper[4921]: I1210 12:58:12.732947 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:58:12 crc kubenswrapper[4921]: I1210 12:58:12.732968 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:58:12 crc kubenswrapper[4921]: I1210 12:58:12.732984 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:12Z","lastTransitionTime":"2025-12-10T12:58:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:58:12 crc kubenswrapper[4921]: I1210 12:58:12.835788 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:58:12 crc kubenswrapper[4921]: I1210 12:58:12.835855 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:58:12 crc kubenswrapper[4921]: I1210 12:58:12.835877 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:58:12 crc kubenswrapper[4921]: I1210 12:58:12.835908 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:58:12 crc kubenswrapper[4921]: I1210 12:58:12.835930 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:12Z","lastTransitionTime":"2025-12-10T12:58:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:58:12 crc kubenswrapper[4921]: I1210 12:58:12.944999 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:58:12 crc kubenswrapper[4921]: I1210 12:58:12.945083 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:58:12 crc kubenswrapper[4921]: I1210 12:58:12.945106 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:58:12 crc kubenswrapper[4921]: I1210 12:58:12.945817 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:58:12 crc kubenswrapper[4921]: I1210 12:58:12.945863 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:12Z","lastTransitionTime":"2025-12-10T12:58:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:58:13 crc kubenswrapper[4921]: I1210 12:58:13.050290 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:58:13 crc kubenswrapper[4921]: I1210 12:58:13.050367 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:58:13 crc kubenswrapper[4921]: I1210 12:58:13.050421 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:58:13 crc kubenswrapper[4921]: I1210 12:58:13.050455 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:58:13 crc kubenswrapper[4921]: I1210 12:58:13.050480 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:13Z","lastTransitionTime":"2025-12-10T12:58:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:58:13 crc kubenswrapper[4921]: I1210 12:58:13.153038 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:58:13 crc kubenswrapper[4921]: I1210 12:58:13.153088 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:58:13 crc kubenswrapper[4921]: I1210 12:58:13.153099 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:58:13 crc kubenswrapper[4921]: I1210 12:58:13.153117 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:58:13 crc kubenswrapper[4921]: I1210 12:58:13.153130 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:13Z","lastTransitionTime":"2025-12-10T12:58:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:58:13 crc kubenswrapper[4921]: I1210 12:58:13.256552 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:58:13 crc kubenswrapper[4921]: I1210 12:58:13.256639 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:58:13 crc kubenswrapper[4921]: I1210 12:58:13.256664 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:58:13 crc kubenswrapper[4921]: I1210 12:58:13.256697 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:58:13 crc kubenswrapper[4921]: I1210 12:58:13.256727 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:13Z","lastTransitionTime":"2025-12-10T12:58:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:58:13 crc kubenswrapper[4921]: I1210 12:58:13.360858 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:58:13 crc kubenswrapper[4921]: I1210 12:58:13.360972 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:58:13 crc kubenswrapper[4921]: I1210 12:58:13.360991 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:58:13 crc kubenswrapper[4921]: I1210 12:58:13.361023 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:58:13 crc kubenswrapper[4921]: I1210 12:58:13.361045 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:13Z","lastTransitionTime":"2025-12-10T12:58:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:58:13 crc kubenswrapper[4921]: I1210 12:58:13.463586 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:58:13 crc kubenswrapper[4921]: I1210 12:58:13.463617 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:58:13 crc kubenswrapper[4921]: I1210 12:58:13.463628 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:58:13 crc kubenswrapper[4921]: I1210 12:58:13.463642 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:58:13 crc kubenswrapper[4921]: I1210 12:58:13.463652 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:13Z","lastTransitionTime":"2025-12-10T12:58:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:58:13 crc kubenswrapper[4921]: I1210 12:58:13.567043 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:58:13 crc kubenswrapper[4921]: I1210 12:58:13.567112 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:58:13 crc kubenswrapper[4921]: I1210 12:58:13.567130 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:58:13 crc kubenswrapper[4921]: I1210 12:58:13.567156 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:58:13 crc kubenswrapper[4921]: I1210 12:58:13.567174 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:13Z","lastTransitionTime":"2025-12-10T12:58:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:58:13 crc kubenswrapper[4921]: I1210 12:58:13.671198 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:58:13 crc kubenswrapper[4921]: I1210 12:58:13.671250 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:58:13 crc kubenswrapper[4921]: I1210 12:58:13.671267 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:58:13 crc kubenswrapper[4921]: I1210 12:58:13.671291 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:58:13 crc kubenswrapper[4921]: I1210 12:58:13.671308 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:13Z","lastTransitionTime":"2025-12-10T12:58:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:58:13 crc kubenswrapper[4921]: I1210 12:58:13.778870 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:58:13 crc kubenswrapper[4921]: I1210 12:58:13.778974 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:58:13 crc kubenswrapper[4921]: I1210 12:58:13.778992 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:58:13 crc kubenswrapper[4921]: I1210 12:58:13.779012 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:58:13 crc kubenswrapper[4921]: I1210 12:58:13.779026 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:13Z","lastTransitionTime":"2025-12-10T12:58:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 12:58:13 crc kubenswrapper[4921]: I1210 12:58:13.882643 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:58:13 crc kubenswrapper[4921]: I1210 12:58:13.882708 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:58:13 crc kubenswrapper[4921]: I1210 12:58:13.882725 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:58:13 crc kubenswrapper[4921]: I1210 12:58:13.882751 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 12:58:13 crc kubenswrapper[4921]: I1210 12:58:13.882769 4921 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T12:58:13Z","lastTransitionTime":"2025-12-10T12:58:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 12:58:13 crc kubenswrapper[4921]: I1210 12:58:13.984941 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 12:58:13 crc kubenswrapper[4921]: I1210 12:58:13.984980 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 12:58:13 crc kubenswrapper[4921]: I1210 12:58:13.984989 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 12:58:13 crc kubenswrapper[4921]: I1210 12:58:13.985002 4921 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Dec 10 12:58:13 crc kubenswrapper[4921]: I1210 12:58:13.985092 4921 kubelet_node_status.go:538] "Fast updating node status as it just became ready" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.028270 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-d8m4j"] Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.028944 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d8m4j" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.044532 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-jwb9f"] Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.044549 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.044968 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.045064 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.045146 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.045223 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.045310 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.045374 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.045371 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.045449 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.050618 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-qq2ff"] Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.051053 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-qq2ff" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.051114 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-jwb9f" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.051935 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-55m5g"] Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.052427 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-55m5g" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.052720 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-5f9l6"] Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.053054 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-5f9l6" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.054490 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.054597 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.054687 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-jpzts"] Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.054801 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.054965 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-jpzts" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.055006 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-bx4bn"] Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.055049 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.055455 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-bx4bn" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.057891 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-6b7sj"] Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.058278 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-mtf22"] Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.058480 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29422845-kdjmt"] Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.058739 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29422845-kdjmt" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.059480 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-6b7sj" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.059517 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-mtf22" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.068188 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-w8czv"] Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.068728 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-w8czv" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.069046 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-8pb7c"] Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.069316 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-8pb7c" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.070081 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-8fpc5"] Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.070430 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-8fpc5" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.070863 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-gwnkp"] Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.071536 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-gwnkp" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.071636 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-jwb9f"] Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.080370 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.083059 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.084095 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-dbzbp"] Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.084553 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-dbzbp" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.084706 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.084886 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.084973 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.085032 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.086484 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.086635 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.087420 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.087574 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.087619 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.087691 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.087764 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.087658 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.087838 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.092976 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.093195 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.093310 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.093510 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.093596 4921 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-controller-manager"/"serving-cert" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.093730 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.121971 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.122301 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.122458 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.128587 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.132418 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.135839 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/c265a212-7c72-44e8-9e5e-cfef9fabdbfe-audit-dir\") pod \"apiserver-7bbb656c7d-d8m4j\" (UID: \"c265a212-7c72-44e8-9e5e-cfef9fabdbfe\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d8m4j" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.135878 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/c265a212-7c72-44e8-9e5e-cfef9fabdbfe-etcd-client\") pod \"apiserver-7bbb656c7d-d8m4j\" (UID: \"c265a212-7c72-44e8-9e5e-cfef9fabdbfe\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d8m4j" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.135900 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c265a212-7c72-44e8-9e5e-cfef9fabdbfe-serving-cert\") pod \"apiserver-7bbb656c7d-d8m4j\" (UID: \"c265a212-7c72-44e8-9e5e-cfef9fabdbfe\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d8m4j" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.135921 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/c265a212-7c72-44e8-9e5e-cfef9fabdbfe-audit-policies\") pod \"apiserver-7bbb656c7d-d8m4j\" (UID: \"c265a212-7c72-44e8-9e5e-cfef9fabdbfe\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d8m4j" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.135943 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/9c7fe3ab-dc6a-44fb-9c30-070f46cfd0f4-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-jwb9f\" (UID: \"9c7fe3ab-dc6a-44fb-9c30-070f46cfd0f4\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-jwb9f" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.135968 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gcswl\" (UniqueName: \"kubernetes.io/projected/9c7fe3ab-dc6a-44fb-9c30-070f46cfd0f4-kube-api-access-gcswl\") pod 
\"machine-api-operator-5694c8668f-jwb9f\" (UID: \"9c7fe3ab-dc6a-44fb-9c30-070f46cfd0f4\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-jwb9f" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.135991 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/c265a212-7c72-44e8-9e5e-cfef9fabdbfe-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-d8m4j\" (UID: \"c265a212-7c72-44e8-9e5e-cfef9fabdbfe\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d8m4j" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.136010 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/9c7fe3ab-dc6a-44fb-9c30-070f46cfd0f4-images\") pod \"machine-api-operator-5694c8668f-jwb9f\" (UID: \"9c7fe3ab-dc6a-44fb-9c30-070f46cfd0f4\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-jwb9f" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.136073 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c265a212-7c72-44e8-9e5e-cfef9fabdbfe-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-d8m4j\" (UID: \"c265a212-7c72-44e8-9e5e-cfef9fabdbfe\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d8m4j" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.136108 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/c265a212-7c72-44e8-9e5e-cfef9fabdbfe-encryption-config\") pod \"apiserver-7bbb656c7d-d8m4j\" (UID: \"c265a212-7c72-44e8-9e5e-cfef9fabdbfe\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d8m4j" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.136127 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9c7fe3ab-dc6a-44fb-9c30-070f46cfd0f4-config\") pod \"machine-api-operator-5694c8668f-jwb9f\" (UID: \"9c7fe3ab-dc6a-44fb-9c30-070f46cfd0f4\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-jwb9f" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.136188 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5bscb\" (UniqueName: \"kubernetes.io/projected/c265a212-7c72-44e8-9e5e-cfef9fabdbfe-kube-api-access-5bscb\") pod \"apiserver-7bbb656c7d-d8m4j\" (UID: \"c265a212-7c72-44e8-9e5e-cfef9fabdbfe\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d8m4j" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.140705 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.140891 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.140982 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.142577 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.143705 4921 reflector.go:368] Caches populated for 
*v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.143834 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-g7bns"]
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.143952 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.144118 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.144262 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.144368 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.144467 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.144507 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-g7bns"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.144424 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-cgqtr"]
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.144533 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.144592 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.144664 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.145338 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-cgqtr"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.145456 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.145576 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.145695 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.145796 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.145969 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-qkh5b"]
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.146373 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-qkh5b"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.151011 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.151153 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.151249 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.151385 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.151522 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.151627 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.151735 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.151828 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.152048 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.152246 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.153778 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.157631 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.157782 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.157866 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.157974 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.159070 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.159288 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.159364 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.159535 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-nvgsl"]
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.160297 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-nvgsl"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.159567 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.159645 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.159683 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.159736 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.159829 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.165140 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.169366 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.169642 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.170683 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.172528 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.174599 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.174804 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.174928 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.175055 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.175646 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.175900 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.176267 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.176359 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.177484 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.177490 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.183530 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.188614 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-rslp7"]
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.189121 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-rslp7"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.190891 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-kwm5b"]
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.191331 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-kwm5b"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.192545 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-ccdh5"]
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.192835 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-ccdh5"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.195367 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-dkwtw"]
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.196014 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-nddsp"]
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.196287 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-kxlr2"]
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.196833 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-556st"]
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.197402 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.197631 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.197779 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.197876 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.198102 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.198149 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-dkwtw"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.198341 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-nddsp"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.198377 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-kxlr2"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.198733 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.198771 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.198979 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j2nnf"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.202611 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.202688 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-556st"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.218566 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.219262 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.219417 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.226813 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.232259 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-rgbkv"]
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.233374 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-rgbkv"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.240905 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jmkzr"]
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.241522 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-7smdt"]
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.241907 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jmkzr"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.242879 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/9c7fe3ab-dc6a-44fb-9c30-070f46cfd0f4-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-jwb9f\" (UID: \"9c7fe3ab-dc6a-44fb-9c30-070f46cfd0f4\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-jwb9f"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.246509 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gcswl\" (UniqueName: \"kubernetes.io/projected/9c7fe3ab-dc6a-44fb-9c30-070f46cfd0f4-kube-api-access-gcswl\") pod \"machine-api-operator-5694c8668f-jwb9f\" (UID: \"9c7fe3ab-dc6a-44fb-9c30-070f46cfd0f4\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-jwb9f"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.246593 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/617cf016-593a-4d56-b104-e450cd6368ee-encryption-config\") pod \"apiserver-76f77b778f-qq2ff\" (UID: \"617cf016-593a-4d56-b104-e450cd6368ee\") " pod="openshift-apiserver/apiserver-76f77b778f-qq2ff"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.246640 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/c265a212-7c72-44e8-9e5e-cfef9fabdbfe-audit-policies\") pod \"apiserver-7bbb656c7d-d8m4j\" (UID: \"c265a212-7c72-44e8-9e5e-cfef9fabdbfe\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d8m4j"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.246673 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/959e5098-562a-471a-9396-fed74ed113b5-config-volume\") pod \"collect-profiles-29422845-kdjmt\" (UID: \"959e5098-562a-471a-9396-fed74ed113b5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29422845-kdjmt"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.246696 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/5b553e21-002b-4905-8f32-6950d71db324-auth-proxy-config\") pod \"machine-config-operator-74547568cd-bx4bn\" (UID: \"5b553e21-002b-4905-8f32-6950d71db324\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-bx4bn"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.246716 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-csv6g\" (UniqueName: \"kubernetes.io/projected/03c59650-42ea-4995-ada6-f86eb8aed1de-kube-api-access-csv6g\") pod \"route-controller-manager-6576b87f9c-55m5g\" (UID: \"03c59650-42ea-4995-ada6-f86eb8aed1de\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-55m5g"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.246741 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2e786864-e8b1-4a03-9327-14d389a5bc21-serving-cert\") pod \"controller-manager-879f6c89f-mtf22\" (UID: \"2e786864-e8b1-4a03-9327-14d389a5bc21\") " pod="openshift-controller-manager/controller-manager-879f6c89f-mtf22"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.246761 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k94kf\" (UniqueName: \"kubernetes.io/projected/ddb1c1c4-6c20-4fab-ba30-4d74c96ad4f4-kube-api-access-k94kf\") pod \"control-plane-machine-set-operator-78cbb6b69f-w8czv\" (UID: \"ddb1c1c4-6c20-4fab-ba30-4d74c96ad4f4\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-w8czv"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.246778 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/03c59650-42ea-4995-ada6-f86eb8aed1de-client-ca\") pod \"route-controller-manager-6576b87f9c-55m5g\" (UID: \"03c59650-42ea-4995-ada6-f86eb8aed1de\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-55m5g"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.246805 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f0bed6a9-002b-40fc-89d5-031568b7e47c-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-5f9l6\" (UID: \"f0bed6a9-002b-40fc-89d5-031568b7e47c\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-5f9l6"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.246824 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l8s8t\" (UniqueName: \"kubernetes.io/projected/76a2b163-6490-4d77-947f-c7333cb25129-kube-api-access-l8s8t\") pod \"machine-config-controller-84d6567774-6b7sj\" (UID: \"76a2b163-6490-4d77-947f-c7333cb25129\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-6b7sj"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.246859 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/c265a212-7c72-44e8-9e5e-cfef9fabdbfe-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-d8m4j\" (UID: \"c265a212-7c72-44e8-9e5e-cfef9fabdbfe\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d8m4j"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.246879 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/5b553e21-002b-4905-8f32-6950d71db324-proxy-tls\") pod \"machine-config-operator-74547568cd-bx4bn\" (UID: \"5b553e21-002b-4905-8f32-6950d71db324\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-bx4bn"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.246899 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2b8366f4-8622-49dd-a0ab-d832fd34bccd-config\") pod \"openshift-apiserver-operator-796bbdcf4f-jpzts\" (UID: \"2b8366f4-8622-49dd-a0ab-d832fd34bccd\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-jpzts"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.246923 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ks8fc\" (UniqueName: \"kubernetes.io/projected/ad2115a5-1371-4a19-b1e8-7f93a7719a71-kube-api-access-ks8fc\") pod \"console-f9d7485db-8pb7c\" (UID: \"ad2115a5-1371-4a19-b1e8-7f93a7719a71\") " pod="openshift-console/console-f9d7485db-8pb7c"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.246953 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k5fhj\" (UniqueName: \"kubernetes.io/projected/4ae7e30d-338b-464e-baef-c7e304ff67d9-kube-api-access-k5fhj\") pod \"migrator-59844c95c7-8fpc5\" (UID: \"4ae7e30d-338b-464e-baef-c7e304ff67d9\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-8fpc5"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.246994 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/00792261-f23c-4fc8-a67b-4b7753b692a1-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-g7bns\" (UID: \"00792261-f23c-4fc8-a67b-4b7753b692a1\") " pod="openshift-authentication/oauth-openshift-558db77b4-g7bns"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.247023 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kkmwp\" (UniqueName: \"kubernetes.io/projected/f0bed6a9-002b-40fc-89d5-031568b7e47c-kube-api-access-kkmwp\") pod \"authentication-operator-69f744f599-5f9l6\" (UID: \"f0bed6a9-002b-40fc-89d5-031568b7e47c\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-5f9l6"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.247096 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/5b553e21-002b-4905-8f32-6950d71db324-images\") pod \"machine-config-operator-74547568cd-bx4bn\" (UID: \"5b553e21-002b-4905-8f32-6950d71db324\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-bx4bn"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.247125 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/61feb33b-8fe3-4ab5-b3a2-df7db4172225-trusted-ca\") pod \"console-operator-58897d9998-dbzbp\" (UID: \"61feb33b-8fe3-4ab5-b3a2-df7db4172225\") " pod="openshift-console-operator/console-operator-58897d9998-dbzbp"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.247147 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/61feb33b-8fe3-4ab5-b3a2-df7db4172225-config\") pod \"console-operator-58897d9998-dbzbp\" (UID: \"61feb33b-8fe3-4ab5-b3a2-df7db4172225\") " pod="openshift-console-operator/console-operator-58897d9998-dbzbp"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.247178 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3f1deb2d-0d02-4085-8aba-d05024240e34-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-cgqtr\" (UID: \"3f1deb2d-0d02-4085-8aba-d05024240e34\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-cgqtr"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.247209 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/959e5098-562a-471a-9396-fed74ed113b5-secret-volume\") pod \"collect-profiles-29422845-kdjmt\" (UID: \"959e5098-562a-471a-9396-fed74ed113b5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29422845-kdjmt"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.247235 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-62qtp\" (UniqueName: \"kubernetes.io/projected/2e786864-e8b1-4a03-9327-14d389a5bc21-kube-api-access-62qtp\") pod \"controller-manager-879f6c89f-mtf22\" (UID: \"2e786864-e8b1-4a03-9327-14d389a5bc21\") " pod="openshift-controller-manager/controller-manager-879f6c89f-mtf22"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.247268 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c265a212-7c72-44e8-9e5e-cfef9fabdbfe-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-d8m4j\" (UID: \"c265a212-7c72-44e8-9e5e-cfef9fabdbfe\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d8m4j"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.247296 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kp5br\" (UniqueName: \"kubernetes.io/projected/959e5098-562a-471a-9396-fed74ed113b5-kube-api-access-kp5br\") pod \"collect-profiles-29422845-kdjmt\" (UID: \"959e5098-562a-471a-9396-fed74ed113b5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29422845-kdjmt"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.247321 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/00792261-f23c-4fc8-a67b-4b7753b692a1-audit-dir\") pod \"oauth-openshift-558db77b4-g7bns\" (UID: \"00792261-f23c-4fc8-a67b-4b7753b692a1\") " pod="openshift-authentication/oauth-openshift-558db77b4-g7bns"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.247349 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/00792261-f23c-4fc8-a67b-4b7753b692a1-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-g7bns\" (UID: \"00792261-f23c-4fc8-a67b-4b7753b692a1\") " pod="openshift-authentication/oauth-openshift-558db77b4-g7bns"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.247376 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9c7fe3ab-dc6a-44fb-9c30-070f46cfd0f4-config\") pod \"machine-api-operator-5694c8668f-jwb9f\" (UID: \"9c7fe3ab-dc6a-44fb-9c30-070f46cfd0f4\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-jwb9f"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.247433 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/617cf016-593a-4d56-b104-e450cd6368ee-audit-dir\") pod \"apiserver-76f77b778f-qq2ff\" (UID: \"617cf016-593a-4d56-b104-e450cd6368ee\") " pod="openshift-apiserver/apiserver-76f77b778f-qq2ff"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.247471 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/76a2b163-6490-4d77-947f-c7333cb25129-proxy-tls\") pod \"machine-config-controller-84d6567774-6b7sj\" (UID: \"76a2b163-6490-4d77-947f-c7333cb25129\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-6b7sj"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.247535 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/ad2115a5-1371-4a19-b1e8-7f93a7719a71-oauth-serving-cert\") pod \"console-f9d7485db-8pb7c\" (UID: \"ad2115a5-1371-4a19-b1e8-7f93a7719a71\") " pod="openshift-console/console-f9d7485db-8pb7c"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.247563 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/9ab094b2-78dc-4ee6-b563-a1ae064588cf-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-gwnkp\" (UID: \"9ab094b2-78dc-4ee6-b563-a1ae064588cf\") " pod="openshift-marketplace/marketplace-operator-79b997595-gwnkp"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.247598 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/617cf016-593a-4d56-b104-e450cd6368ee-config\") pod \"apiserver-76f77b778f-qq2ff\" (UID: \"617cf016-593a-4d56-b104-e450cd6368ee\") " pod="openshift-apiserver/apiserver-76f77b778f-qq2ff"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.247624 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ad2115a5-1371-4a19-b1e8-7f93a7719a71-trusted-ca-bundle\") pod \"console-f9d7485db-8pb7c\" (UID: \"ad2115a5-1371-4a19-b1e8-7f93a7719a71\") " pod="openshift-console/console-f9d7485db-8pb7c"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.247653 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/00792261-f23c-4fc8-a67b-4b7753b692a1-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-g7bns\" (UID: \"00792261-f23c-4fc8-a67b-4b7753b692a1\") " pod="openshift-authentication/oauth-openshift-558db77b4-g7bns"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.247690 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f0bed6a9-002b-40fc-89d5-031568b7e47c-config\") pod \"authentication-operator-69f744f599-5f9l6\" (UID: \"f0bed6a9-002b-40fc-89d5-031568b7e47c\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-5f9l6"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.247906 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2b8366f4-8622-49dd-a0ab-d832fd34bccd-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-jpzts\" (UID: \"2b8366f4-8622-49dd-a0ab-d832fd34bccd\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-jpzts"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.247927 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5bscb\" (UniqueName: \"kubernetes.io/projected/c265a212-7c72-44e8-9e5e-cfef9fabdbfe-kube-api-access-5bscb\") pod \"apiserver-7bbb656c7d-d8m4j\" (UID: \"c265a212-7c72-44e8-9e5e-cfef9fabdbfe\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d8m4j"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.247947 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/617cf016-593a-4d56-b104-e450cd6368ee-node-pullsecrets\") pod \"apiserver-76f77b778f-qq2ff\" (UID: \"617cf016-593a-4d56-b104-e450cd6368ee\") " pod="openshift-apiserver/apiserver-76f77b778f-qq2ff"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.247969 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7j4s5\" (UniqueName: \"kubernetes.io/projected/61feb33b-8fe3-4ab5-b3a2-df7db4172225-kube-api-access-7j4s5\") pod \"console-operator-58897d9998-dbzbp\" (UID: \"61feb33b-8fe3-4ab5-b3a2-df7db4172225\") " pod="openshift-console-operator/console-operator-58897d9998-dbzbp"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.247989 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/ad2115a5-1371-4a19-b1e8-7f93a7719a71-service-ca\") pod \"console-f9d7485db-8pb7c\" (UID: \"ad2115a5-1371-4a19-b1e8-7f93a7719a71\") " pod="openshift-console/console-f9d7485db-8pb7c"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.248009 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/c265a212-7c72-44e8-9e5e-cfef9fabdbfe-audit-dir\") pod \"apiserver-7bbb656c7d-d8m4j\" (UID: \"c265a212-7c72-44e8-9e5e-cfef9fabdbfe\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d8m4j"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.248028 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/617cf016-593a-4d56-b104-e450cd6368ee-trusted-ca-bundle\") pod \"apiserver-76f77b778f-qq2ff\" (UID: \"617cf016-593a-4d56-b104-e450cd6368ee\") " pod="openshift-apiserver/apiserver-76f77b778f-qq2ff"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.248047 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/617cf016-593a-4d56-b104-e450cd6368ee-audit\") pod \"apiserver-76f77b778f-qq2ff\" (UID: \"617cf016-593a-4d56-b104-e450cd6368ee\") " pod="openshift-apiserver/apiserver-76f77b778f-qq2ff"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.248069 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2e786864-e8b1-4a03-9327-14d389a5bc21-config\") pod \"controller-manager-879f6c89f-mtf22\" (UID: \"2e786864-e8b1-4a03-9327-14d389a5bc21\") " pod="openshift-controller-manager/controller-manager-879f6c89f-mtf22"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.248092 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/617cf016-593a-4d56-b104-e450cd6368ee-etcd-client\") pod \"apiserver-76f77b778f-qq2ff\" (UID: \"617cf016-593a-4d56-b104-e450cd6368ee\") " pod="openshift-apiserver/apiserver-76f77b778f-qq2ff"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.248114 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/00792261-f23c-4fc8-a67b-4b7753b692a1-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-g7bns\" (UID: \"00792261-f23c-4fc8-a67b-4b7753b692a1\") " pod="openshift-authentication/oauth-openshift-558db77b4-g7bns"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.248139 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/00792261-f23c-4fc8-a67b-4b7753b692a1-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-g7bns\" (UID: \"00792261-f23c-4fc8-a67b-4b7753b692a1\") " pod="openshift-authentication/oauth-openshift-558db77b4-g7bns"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.248377 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/c265a212-7c72-44e8-9e5e-cfef9fabdbfe-audit-policies\") pod \"apiserver-7bbb656c7d-d8m4j\" (UID: \"c265a212-7c72-44e8-9e5e-cfef9fabdbfe\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d8m4j"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.275647 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/ed4e7481-d39e-45b7-98e6-8dceac35361c-signing-cabundle\") pod \"service-ca-9c57cc56f-qkh5b\" (UID: \"ed4e7481-d39e-45b7-98e6-8dceac35361c\") " pod="openshift-service-ca/service-ca-9c57cc56f-qkh5b"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.277051 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/9c7fe3ab-dc6a-44fb-9c30-070f46cfd0f4-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-jwb9f\" (UID: \"9c7fe3ab-dc6a-44fb-9c30-070f46cfd0f4\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-jwb9f"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.277511 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-7smdt"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.280350 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-mnz9t"]
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.281551 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.281955 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.282172 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-mnz9t"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.282185 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.282612 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/ad2115a5-1371-4a19-b1e8-7f93a7719a71-console-oauth-config\") pod \"console-f9d7485db-8pb7c\" (UID: \"ad2115a5-1371-4a19-b1e8-7f93a7719a71\") " pod="openshift-console/console-f9d7485db-8pb7c"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.282655 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/ad2115a5-1371-4a19-b1e8-7f93a7719a71-console-config\") pod \"console-f9d7485db-8pb7c\" (UID: \"ad2115a5-1371-4a19-b1e8-7f93a7719a71\") " pod="openshift-console/console-f9d7485db-8pb7c"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.282675 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/ddb1c1c4-6c20-4fab-ba30-4d74c96ad4f4-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-w8czv\" (UID: \"ddb1c1c4-6c20-4fab-ba30-4d74c96ad4f4\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-w8czv"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.282693 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/617cf016-593a-4d56-b104-e450cd6368ee-etcd-serving-ca\") pod \"apiserver-76f77b778f-qq2ff\" (UID: \"617cf016-593a-4d56-b104-e450cd6368ee\") " pod="openshift-apiserver/apiserver-76f77b778f-qq2ff"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.282730 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9ab094b2-78dc-4ee6-b563-a1ae064588cf-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-gwnkp\" (UID: \"9ab094b2-78dc-4ee6-b563-a1ae064588cf\") " pod="openshift-marketplace/marketplace-operator-79b997595-gwnkp"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.282748 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/61feb33b-8fe3-4ab5-b3a2-df7db4172225-serving-cert\") pod \"console-operator-58897d9998-dbzbp\" (UID: \"61feb33b-8fe3-4ab5-b3a2-df7db4172225\") " pod="openshift-console-operator/console-operator-58897d9998-dbzbp"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.282798 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/c265a212-7c72-44e8-9e5e-cfef9fabdbfe-audit-dir\") pod \"apiserver-7bbb656c7d-d8m4j\" (UID: \"c265a212-7c72-44e8-9e5e-cfef9fabdbfe\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d8m4j"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.282805 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/9c7fe3ab-dc6a-44fb-9c30-070f46cfd0f4-images\") pod \"machine-api-operator-5694c8668f-jwb9f\" (UID: \"9c7fe3ab-dc6a-44fb-9c30-070f46cfd0f4\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-jwb9f"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.282827 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/00792261-f23c-4fc8-a67b-4b7753b692a1-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-g7bns\" (UID: \"00792261-f23c-4fc8-a67b-4b7753b692a1\") " pod="openshift-authentication/oauth-openshift-558db77b4-g7bns"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.282852 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ws6sh\" (UniqueName: \"kubernetes.io/projected/617cf016-593a-4d56-b104-e450cd6368ee-kube-api-access-ws6sh\") pod \"apiserver-76f77b778f-qq2ff\" (UID: \"617cf016-593a-4d56-b104-e450cd6368ee\") " pod="openshift-apiserver/apiserver-76f77b778f-qq2ff"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.282949 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/00792261-f23c-4fc8-a67b-4b7753b692a1-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-g7bns\" (UID: \"00792261-f23c-4fc8-a67b-4b7753b692a1\") " pod="openshift-authentication/oauth-openshift-558db77b4-g7bns"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.282976 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/ed4e7481-d39e-45b7-98e6-8dceac35361c-signing-key\") pod \"service-ca-9c57cc56f-qkh5b\" (UID: \"ed4e7481-d39e-45b7-98e6-8dceac35361c\") " pod="openshift-service-ca/service-ca-9c57cc56f-qkh5b"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.282996 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-skdpq\" (UniqueName: \"kubernetes.io/projected/2b8366f4-8622-49dd-a0ab-d832fd34bccd-kube-api-access-skdpq\") pod \"openshift-apiserver-operator-796bbdcf4f-jpzts\" (UID: \"2b8366f4-8622-49dd-a0ab-d832fd34bccd\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-jpzts"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.283036 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/76a2b163-6490-4d77-947f-c7333cb25129-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-6b7sj\" (UID: \"76a2b163-6490-4d77-947f-c7333cb25129\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-6b7sj"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.283057 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/00792261-f23c-4fc8-a67b-4b7753b692a1-audit-policies\") pod \"oauth-openshift-558db77b4-g7bns\" (UID: \"00792261-f23c-4fc8-a67b-4b7753b692a1\") " pod="openshift-authentication/oauth-openshift-558db77b4-g7bns"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.283074 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9fks2\" (UniqueName: \"kubernetes.io/projected/00792261-f23c-4fc8-a67b-4b7753b692a1-kube-api-access-9fks2\") pod \"oauth-openshift-558db77b4-g7bns\" (UID: \"00792261-f23c-4fc8-a67b-4b7753b692a1\") " pod="openshift-authentication/oauth-openshift-558db77b4-g7bns"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.283113 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/00792261-f23c-4fc8-a67b-4b7753b692a1-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-g7bns\" (UID: \"00792261-f23c-4fc8-a67b-4b7753b692a1\") " pod="openshift-authentication/oauth-openshift-558db77b4-g7bns"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.283131 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/2e786864-e8b1-4a03-9327-14d389a5bc21-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-mtf22\" (UID: \"2e786864-e8b1-4a03-9327-14d389a5bc21\") " pod="openshift-controller-manager/controller-manager-879f6c89f-mtf22"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.283146 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/617cf016-593a-4d56-b104-e450cd6368ee-serving-cert\") pod \"apiserver-76f77b778f-qq2ff\" (UID: \"617cf016-593a-4d56-b104-e450cd6368ee\") " pod="openshift-apiserver/apiserver-76f77b778f-qq2ff"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.283162 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kr26f\" (UniqueName: \"kubernetes.io/projected/9ab094b2-78dc-4ee6-b563-a1ae064588cf-kube-api-access-kr26f\") pod \"marketplace-operator-79b997595-gwnkp\" (UID: \"9ab094b2-78dc-4ee6-b563-a1ae064588cf\") " pod="openshift-marketplace/marketplace-operator-79b997595-gwnkp"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.283212 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/c265a212-7c72-44e8-9e5e-cfef9fabdbfe-encryption-config\") pod \"apiserver-7bbb656c7d-d8m4j\" (UID: \"c265a212-7c72-44e8-9e5e-cfef9fabdbfe\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d8m4j"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.283229 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/617cf016-593a-4d56-b104-e450cd6368ee-image-import-ca\") pod \"apiserver-76f77b778f-qq2ff\" (UID: \"617cf016-593a-4d56-b104-e450cd6368ee\") " pod="openshift-apiserver/apiserver-76f77b778f-qq2ff"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.283245 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2hmv5\" (UniqueName: \"kubernetes.io/projected/5b553e21-002b-4905-8f32-6950d71db324-kube-api-access-2hmv5\") pod \"machine-config-operator-74547568cd-bx4bn\" (UID: \"5b553e21-002b-4905-8f32-6950d71db324\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-bx4bn"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.283422 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/00792261-f23c-4fc8-a67b-4b7753b692a1-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-g7bns\" (UID: \"00792261-f23c-4fc8-a67b-4b7753b692a1\") " pod="openshift-authentication/oauth-openshift-558db77b4-g7bns"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.283514 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tcs8n\" (UniqueName: \"kubernetes.io/projected/ed4e7481-d39e-45b7-98e6-8dceac35361c-kube-api-access-tcs8n\") pod \"service-ca-9c57cc56f-qkh5b\" (UID: \"ed4e7481-d39e-45b7-98e6-8dceac35361c\") " pod="openshift-service-ca/service-ca-9c57cc56f-qkh5b"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.283534 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f0bed6a9-002b-40fc-89d5-031568b7e47c-service-ca-bundle\") pod \"authentication-operator-69f744f599-5f9l6\" (UID: \"f0bed6a9-002b-40fc-89d5-031568b7e47c\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-5f9l6"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.283549 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/ad2115a5-1371-4a19-b1e8-7f93a7719a71-console-serving-cert\") pod \"console-f9d7485db-8pb7c\" (UID: \"ad2115a5-1371-4a19-b1e8-7f93a7719a71\") " pod="openshift-console/console-f9d7485db-8pb7c"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.283566 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/00792261-f23c-4fc8-a67b-4b7753b692a1-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-g7bns\" (UID: \"00792261-f23c-4fc8-a67b-4b7753b692a1\") " pod="openshift-authentication/oauth-openshift-558db77b4-g7bns"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.283583 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/03c59650-42ea-4995-ada6-f86eb8aed1de-config\") pod \"route-controller-manager-6576b87f9c-55m5g\" (UID: \"03c59650-42ea-4995-ada6-f86eb8aed1de\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-55m5g"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.283615 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/00792261-f23c-4fc8-a67b-4b7753b692a1-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-g7bns\" (UID: \"00792261-f23c-4fc8-a67b-4b7753b692a1\") " pod="openshift-authentication/oauth-openshift-558db77b4-g7bns"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.283632 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3f1deb2d-0d02-4085-8aba-d05024240e34-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-cgqtr\" (UID: \"3f1deb2d-0d02-4085-8aba-d05024240e34\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-cgqtr"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.283648 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f0bed6a9-002b-40fc-89d5-031568b7e47c-serving-cert\") pod \"authentication-operator-69f744f599-5f9l6\" (UID: \"f0bed6a9-002b-40fc-89d5-031568b7e47c\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-5f9l6"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.283679 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/03c59650-42ea-4995-ada6-f86eb8aed1de-serving-cert\") pod \"route-controller-manager-6576b87f9c-55m5g\" (UID: \"03c59650-42ea-4995-ada6-f86eb8aed1de\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-55m5g"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.283698 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/c265a212-7c72-44e8-9e5e-cfef9fabdbfe-etcd-client\") pod \"apiserver-7bbb656c7d-d8m4j\" (UID: \"c265a212-7c72-44e8-9e5e-cfef9fabdbfe\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d8m4j"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.283717 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c265a212-7c72-44e8-9e5e-cfef9fabdbfe-serving-cert\") pod \"apiserver-7bbb656c7d-d8m4j\" (UID: \"c265a212-7c72-44e8-9e5e-cfef9fabdbfe\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d8m4j"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.283733 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/2e786864-e8b1-4a03-9327-14d389a5bc21-client-ca\") pod \"controller-manager-879f6c89f-mtf22\" (UID: \"2e786864-e8b1-4a03-9327-14d389a5bc21\") " pod="openshift-controller-manager/controller-manager-879f6c89f-mtf22"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.283749 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/3f1deb2d-0d02-4085-8aba-d05024240e34-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-cgqtr\" (UID: \"3f1deb2d-0d02-4085-8aba-d05024240e34\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-cgqtr"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.284349 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/9c7fe3ab-dc6a-44fb-9c30-070f46cfd0f4-images\") pod \"machine-api-operator-5694c8668f-jwb9f\" (UID: \"9c7fe3ab-dc6a-44fb-9c30-070f46cfd0f4\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-jwb9f"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.284958 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c265a212-7c72-44e8-9e5e-cfef9fabdbfe-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-d8m4j\" (UID: \"c265a212-7c72-44e8-9e5e-cfef9fabdbfe\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d8m4j"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.286834 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9c7fe3ab-dc6a-44fb-9c30-070f46cfd0f4-config\") pod \"machine-api-operator-5694c8668f-jwb9f\" (UID: \"9c7fe3ab-dc6a-44fb-9c30-070f46cfd0f4\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-jwb9f"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.286992 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-bbgxs"]
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.287472 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-cwdx8"]
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.287530 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.287734 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.287763 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-dhxg9"]
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.288215 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-dhxg9"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.288490 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-bbgxs"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.288698 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-cwdx8"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.291509 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-kz2qx"]
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.291962 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-kz2qx"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.295146 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.301917 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/c265a212-7c72-44e8-9e5e-cfef9fabdbfe-encryption-config\") pod \"apiserver-7bbb656c7d-d8m4j\" (UID: \"c265a212-7c72-44e8-9e5e-cfef9fabdbfe\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d8m4j"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.302909 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/c265a212-7c72-44e8-9e5e-cfef9fabdbfe-etcd-client\") pod \"apiserver-7bbb656c7d-d8m4j\" (UID: \"c265a212-7c72-44e8-9e5e-cfef9fabdbfe\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d8m4j"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.306887 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c265a212-7c72-44e8-9e5e-cfef9fabdbfe-serving-cert\") pod \"apiserver-7bbb656c7d-d8m4j\" (UID: \"c265a212-7c72-44e8-9e5e-cfef9fabdbfe\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d8m4j"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.308261 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-zkglp"]
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.308903 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-sjkkc"]
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.309337 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-xtvnd"]
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.309753 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-xtvnd"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.310036 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-zkglp"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.310221 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-sjkkc"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.310249 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.310594 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-7hhsc"]
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.311176 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-7hhsc"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.311533 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-5lrds"]
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.312054 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-5lrds"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.312424 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/c265a212-7c72-44e8-9e5e-cfef9fabdbfe-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-d8m4j\" (UID: \"c265a212-7c72-44e8-9e5e-cfef9fabdbfe\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d8m4j"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.315674 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.315882 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-d8m4j"]
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.315921 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-6b7sj"]
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.328353 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-55m5g"]
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.328403 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-jpzts"]
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.333345 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-w8czv"]
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.340330 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.342510 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-ccdh5"]
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.345302 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-qq2ff"]
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.346965 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29422845-kdjmt"]
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.348232 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-78cdm"]
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.348968 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-78cdm"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.350375 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-8fpc5"]
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.351625 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-mnz9t"]
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.353055 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-rgbkv"]
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.354085 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-nddsp"]
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.354672 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.362288 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-gwnkp"]
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.363556 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-sh5l7"]
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.364328 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-sh5l7"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.370823 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-dbzbp"]
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.372006 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-5f9l6"]
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.374377 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-dkwtw"]
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.376932 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config"
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.378690 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-vp9nq"]
Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.379505 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-vp9nq" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.380829 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-bx4bn"] Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.382376 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-m9sh2"] Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.386066 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/00792261-f23c-4fc8-a67b-4b7753b692a1-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-g7bns\" (UID: \"00792261-f23c-4fc8-a67b-4b7753b692a1\") " pod="openshift-authentication/oauth-openshift-558db77b4-g7bns" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.386114 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3c94527b-255d-4486-9a36-9fd0f7efd4d7-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-rslp7\" (UID: \"3c94527b-255d-4486-9a36-9fd0f7efd4d7\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-rslp7" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.386146 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kr26f\" (UniqueName: \"kubernetes.io/projected/9ab094b2-78dc-4ee6-b563-a1ae064588cf-kube-api-access-kr26f\") pod \"marketplace-operator-79b997595-gwnkp\" (UID: \"9ab094b2-78dc-4ee6-b563-a1ae064588cf\") " pod="openshift-marketplace/marketplace-operator-79b997595-gwnkp" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.386181 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/2e786864-e8b1-4a03-9327-14d389a5bc21-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-mtf22\" (UID: \"2e786864-e8b1-4a03-9327-14d389a5bc21\") " pod="openshift-controller-manager/controller-manager-879f6c89f-mtf22" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.386207 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/617cf016-593a-4d56-b104-e450cd6368ee-serving-cert\") pod \"apiserver-76f77b778f-qq2ff\" (UID: \"617cf016-593a-4d56-b104-e450cd6368ee\") " pod="openshift-apiserver/apiserver-76f77b778f-qq2ff" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.386230 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/617cf016-593a-4d56-b104-e450cd6368ee-image-import-ca\") pod \"apiserver-76f77b778f-qq2ff\" (UID: \"617cf016-593a-4d56-b104-e450cd6368ee\") " pod="openshift-apiserver/apiserver-76f77b778f-qq2ff" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.386249 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2hmv5\" (UniqueName: \"kubernetes.io/projected/5b553e21-002b-4905-8f32-6950d71db324-kube-api-access-2hmv5\") pod \"machine-config-operator-74547568cd-bx4bn\" (UID: \"5b553e21-002b-4905-8f32-6950d71db324\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-bx4bn" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.386275 4921 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/00792261-f23c-4fc8-a67b-4b7753b692a1-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-g7bns\" (UID: \"00792261-f23c-4fc8-a67b-4b7753b692a1\") " pod="openshift-authentication/oauth-openshift-558db77b4-g7bns" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.386299 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6x4f9\" (UniqueName: \"kubernetes.io/projected/3c94527b-255d-4486-9a36-9fd0f7efd4d7-kube-api-access-6x4f9\") pod \"kube-storage-version-migrator-operator-b67b599dd-rslp7\" (UID: \"3c94527b-255d-4486-9a36-9fd0f7efd4d7\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-rslp7" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.386327 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tcs8n\" (UniqueName: \"kubernetes.io/projected/ed4e7481-d39e-45b7-98e6-8dceac35361c-kube-api-access-tcs8n\") pod \"service-ca-9c57cc56f-qkh5b\" (UID: \"ed4e7481-d39e-45b7-98e6-8dceac35361c\") " pod="openshift-service-ca/service-ca-9c57cc56f-qkh5b" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.386349 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f0bed6a9-002b-40fc-89d5-031568b7e47c-service-ca-bundle\") pod \"authentication-operator-69f744f599-5f9l6\" (UID: \"f0bed6a9-002b-40fc-89d5-031568b7e47c\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-5f9l6" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.386373 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mm7cr\" (UniqueName: \"kubernetes.io/projected/a509d814-6573-4bfd-92c7-8d15a2d47228-kube-api-access-mm7cr\") pod \"cluster-samples-operator-665b6dd947-rgbkv\" (UID: \"a509d814-6573-4bfd-92c7-8d15a2d47228\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-rgbkv" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.386411 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/03c59650-42ea-4995-ada6-f86eb8aed1de-config\") pod \"route-controller-manager-6576b87f9c-55m5g\" (UID: \"03c59650-42ea-4995-ada6-f86eb8aed1de\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-55m5g" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.386442 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/ad2115a5-1371-4a19-b1e8-7f93a7719a71-console-serving-cert\") pod \"console-f9d7485db-8pb7c\" (UID: \"ad2115a5-1371-4a19-b1e8-7f93a7719a71\") " pod="openshift-console/console-f9d7485db-8pb7c" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.386464 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/00792261-f23c-4fc8-a67b-4b7753b692a1-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-g7bns\" (UID: \"00792261-f23c-4fc8-a67b-4b7753b692a1\") " pod="openshift-authentication/oauth-openshift-558db77b4-g7bns" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.386490 4921 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/00792261-f23c-4fc8-a67b-4b7753b692a1-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-g7bns\" (UID: \"00792261-f23c-4fc8-a67b-4b7753b692a1\") " pod="openshift-authentication/oauth-openshift-558db77b4-g7bns" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.386513 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3f1deb2d-0d02-4085-8aba-d05024240e34-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-cgqtr\" (UID: \"3f1deb2d-0d02-4085-8aba-d05024240e34\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-cgqtr" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.386533 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f0bed6a9-002b-40fc-89d5-031568b7e47c-serving-cert\") pod \"authentication-operator-69f744f599-5f9l6\" (UID: \"f0bed6a9-002b-40fc-89d5-031568b7e47c\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-5f9l6" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.386594 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/2e786864-e8b1-4a03-9327-14d389a5bc21-client-ca\") pod \"controller-manager-879f6c89f-mtf22\" (UID: \"2e786864-e8b1-4a03-9327-14d389a5bc21\") " pod="openshift-controller-manager/controller-manager-879f6c89f-mtf22" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.386620 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/3f1deb2d-0d02-4085-8aba-d05024240e34-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-cgqtr\" (UID: \"3f1deb2d-0d02-4085-8aba-d05024240e34\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-cgqtr" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.386642 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/03c59650-42ea-4995-ada6-f86eb8aed1de-serving-cert\") pod \"route-controller-manager-6576b87f9c-55m5g\" (UID: \"03c59650-42ea-4995-ada6-f86eb8aed1de\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-55m5g" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.386662 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b3ad786b-c5ca-47ba-8188-680f6c13192b-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-jmkzr\" (UID: \"b3ad786b-c5ca-47ba-8188-680f6c13192b\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jmkzr" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.386684 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/617cf016-593a-4d56-b104-e450cd6368ee-encryption-config\") pod \"apiserver-76f77b778f-qq2ff\" (UID: \"617cf016-593a-4d56-b104-e450cd6368ee\") " pod="openshift-apiserver/apiserver-76f77b778f-qq2ff" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.386718 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/959e5098-562a-471a-9396-fed74ed113b5-config-volume\") pod \"collect-profiles-29422845-kdjmt\" (UID: \"959e5098-562a-471a-9396-fed74ed113b5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29422845-kdjmt" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.386739 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/5b553e21-002b-4905-8f32-6950d71db324-auth-proxy-config\") pod \"machine-config-operator-74547568cd-bx4bn\" (UID: \"5b553e21-002b-4905-8f32-6950d71db324\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-bx4bn" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.386762 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/abc0551e-9e8b-4466-9eea-1da60ead65bb-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-kwm5b\" (UID: \"abc0551e-9e8b-4466-9eea-1da60ead65bb\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-kwm5b" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.386794 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2e786864-e8b1-4a03-9327-14d389a5bc21-serving-cert\") pod \"controller-manager-879f6c89f-mtf22\" (UID: \"2e786864-e8b1-4a03-9327-14d389a5bc21\") " pod="openshift-controller-manager/controller-manager-879f6c89f-mtf22" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.386814 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k94kf\" (UniqueName: \"kubernetes.io/projected/ddb1c1c4-6c20-4fab-ba30-4d74c96ad4f4-kube-api-access-k94kf\") pod \"control-plane-machine-set-operator-78cbb6b69f-w8czv\" (UID: \"ddb1c1c4-6c20-4fab-ba30-4d74c96ad4f4\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-w8czv" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.386838 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-csv6g\" (UniqueName: \"kubernetes.io/projected/03c59650-42ea-4995-ada6-f86eb8aed1de-kube-api-access-csv6g\") pod \"route-controller-manager-6576b87f9c-55m5g\" (UID: \"03c59650-42ea-4995-ada6-f86eb8aed1de\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-55m5g" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.386862 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6zx2b\" (UniqueName: \"kubernetes.io/projected/1172878a-36b0-44ca-9df0-7eff88ebadfd-kube-api-access-6zx2b\") pod \"service-ca-operator-777779d784-nddsp\" (UID: \"1172878a-36b0-44ca-9df0-7eff88ebadfd\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-nddsp" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.386885 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f0bed6a9-002b-40fc-89d5-031568b7e47c-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-5f9l6\" (UID: \"f0bed6a9-002b-40fc-89d5-031568b7e47c\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-5f9l6" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.386906 4921 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/03c59650-42ea-4995-ada6-f86eb8aed1de-client-ca\") pod \"route-controller-manager-6576b87f9c-55m5g\" (UID: \"03c59650-42ea-4995-ada6-f86eb8aed1de\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-55m5g" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.386924 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1172878a-36b0-44ca-9df0-7eff88ebadfd-config\") pod \"service-ca-operator-777779d784-nddsp\" (UID: \"1172878a-36b0-44ca-9df0-7eff88ebadfd\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-nddsp" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.386947 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b3ad786b-c5ca-47ba-8188-680f6c13192b-config\") pod \"kube-apiserver-operator-766d6c64bb-jmkzr\" (UID: \"b3ad786b-c5ca-47ba-8188-680f6c13192b\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jmkzr" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.386984 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/5b553e21-002b-4905-8f32-6950d71db324-proxy-tls\") pod \"machine-config-operator-74547568cd-bx4bn\" (UID: \"5b553e21-002b-4905-8f32-6950d71db324\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-bx4bn" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.387068 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2b8366f4-8622-49dd-a0ab-d832fd34bccd-config\") pod \"openshift-apiserver-operator-796bbdcf4f-jpzts\" (UID: \"2b8366f4-8622-49dd-a0ab-d832fd34bccd\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-jpzts" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.387091 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l8s8t\" (UniqueName: \"kubernetes.io/projected/76a2b163-6490-4d77-947f-c7333cb25129-kube-api-access-l8s8t\") pod \"machine-config-controller-84d6567774-6b7sj\" (UID: \"76a2b163-6490-4d77-947f-c7333cb25129\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-6b7sj" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.387114 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ks8fc\" (UniqueName: \"kubernetes.io/projected/ad2115a5-1371-4a19-b1e8-7f93a7719a71-kube-api-access-ks8fc\") pod \"console-f9d7485db-8pb7c\" (UID: \"ad2115a5-1371-4a19-b1e8-7f93a7719a71\") " pod="openshift-console/console-f9d7485db-8pb7c" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.387139 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xc6b8\" (UniqueName: \"kubernetes.io/projected/abc0551e-9e8b-4466-9eea-1da60ead65bb-kube-api-access-xc6b8\") pod \"package-server-manager-789f6589d5-kwm5b\" (UID: \"abc0551e-9e8b-4466-9eea-1da60ead65bb\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-kwm5b" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.387161 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k5fhj\" 
(UniqueName: \"kubernetes.io/projected/4ae7e30d-338b-464e-baef-c7e304ff67d9-kube-api-access-k5fhj\") pod \"migrator-59844c95c7-8fpc5\" (UID: \"4ae7e30d-338b-464e-baef-c7e304ff67d9\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-8fpc5" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.387185 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/00792261-f23c-4fc8-a67b-4b7753b692a1-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-g7bns\" (UID: \"00792261-f23c-4fc8-a67b-4b7753b692a1\") " pod="openshift-authentication/oauth-openshift-558db77b4-g7bns" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.387355 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/5b553e21-002b-4905-8f32-6950d71db324-images\") pod \"machine-config-operator-74547568cd-bx4bn\" (UID: \"5b553e21-002b-4905-8f32-6950d71db324\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-bx4bn" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.387379 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/61feb33b-8fe3-4ab5-b3a2-df7db4172225-trusted-ca\") pod \"console-operator-58897d9998-dbzbp\" (UID: \"61feb33b-8fe3-4ab5-b3a2-df7db4172225\") " pod="openshift-console-operator/console-operator-58897d9998-dbzbp" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.387416 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kkmwp\" (UniqueName: \"kubernetes.io/projected/f0bed6a9-002b-40fc-89d5-031568b7e47c-kube-api-access-kkmwp\") pod \"authentication-operator-69f744f599-5f9l6\" (UID: \"f0bed6a9-002b-40fc-89d5-031568b7e47c\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-5f9l6" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.387441 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b3ad786b-c5ca-47ba-8188-680f6c13192b-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-jmkzr\" (UID: \"b3ad786b-c5ca-47ba-8188-680f6c13192b\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jmkzr" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.387464 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/959e5098-562a-471a-9396-fed74ed113b5-secret-volume\") pod \"collect-profiles-29422845-kdjmt\" (UID: \"959e5098-562a-471a-9396-fed74ed113b5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29422845-kdjmt" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.387485 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/61feb33b-8fe3-4ab5-b3a2-df7db4172225-config\") pod \"console-operator-58897d9998-dbzbp\" (UID: \"61feb33b-8fe3-4ab5-b3a2-df7db4172225\") " pod="openshift-console-operator/console-operator-58897d9998-dbzbp" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.387509 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3f1deb2d-0d02-4085-8aba-d05024240e34-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-cgqtr\" (UID: 
\"3f1deb2d-0d02-4085-8aba-d05024240e34\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-cgqtr" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.387616 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-62qtp\" (UniqueName: \"kubernetes.io/projected/2e786864-e8b1-4a03-9327-14d389a5bc21-kube-api-access-62qtp\") pod \"controller-manager-879f6c89f-mtf22\" (UID: \"2e786864-e8b1-4a03-9327-14d389a5bc21\") " pod="openshift-controller-manager/controller-manager-879f6c89f-mtf22" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.387644 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-glfrn\" (UniqueName: \"kubernetes.io/projected/23dc36ea-02d4-493e-863b-56126c624ab0-kube-api-access-glfrn\") pod \"downloads-7954f5f757-7smdt\" (UID: \"23dc36ea-02d4-493e-863b-56126c624ab0\") " pod="openshift-console/downloads-7954f5f757-7smdt" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.387669 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kp5br\" (UniqueName: \"kubernetes.io/projected/959e5098-562a-471a-9396-fed74ed113b5-kube-api-access-kp5br\") pod \"collect-profiles-29422845-kdjmt\" (UID: \"959e5098-562a-471a-9396-fed74ed113b5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29422845-kdjmt" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.387691 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/00792261-f23c-4fc8-a67b-4b7753b692a1-audit-dir\") pod \"oauth-openshift-558db77b4-g7bns\" (UID: \"00792261-f23c-4fc8-a67b-4b7753b692a1\") " pod="openshift-authentication/oauth-openshift-558db77b4-g7bns" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.387711 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/00792261-f23c-4fc8-a67b-4b7753b692a1-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-g7bns\" (UID: \"00792261-f23c-4fc8-a67b-4b7753b692a1\") " pod="openshift-authentication/oauth-openshift-558db77b4-g7bns" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.387734 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/617cf016-593a-4d56-b104-e450cd6368ee-audit-dir\") pod \"apiserver-76f77b778f-qq2ff\" (UID: \"617cf016-593a-4d56-b104-e450cd6368ee\") " pod="openshift-apiserver/apiserver-76f77b778f-qq2ff" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.387842 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/ad2115a5-1371-4a19-b1e8-7f93a7719a71-oauth-serving-cert\") pod \"console-f9d7485db-8pb7c\" (UID: \"ad2115a5-1371-4a19-b1e8-7f93a7719a71\") " pod="openshift-console/console-f9d7485db-8pb7c" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.387863 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/9ab094b2-78dc-4ee6-b563-a1ae064588cf-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-gwnkp\" (UID: \"9ab094b2-78dc-4ee6-b563-a1ae064588cf\") " pod="openshift-marketplace/marketplace-operator-79b997595-gwnkp" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 
12:58:14.387885 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/76a2b163-6490-4d77-947f-c7333cb25129-proxy-tls\") pod \"machine-config-controller-84d6567774-6b7sj\" (UID: \"76a2b163-6490-4d77-947f-c7333cb25129\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-6b7sj" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.387916 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/617cf016-593a-4d56-b104-e450cd6368ee-config\") pod \"apiserver-76f77b778f-qq2ff\" (UID: \"617cf016-593a-4d56-b104-e450cd6368ee\") " pod="openshift-apiserver/apiserver-76f77b778f-qq2ff" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.387939 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ad2115a5-1371-4a19-b1e8-7f93a7719a71-trusted-ca-bundle\") pod \"console-f9d7485db-8pb7c\" (UID: \"ad2115a5-1371-4a19-b1e8-7f93a7719a71\") " pod="openshift-console/console-f9d7485db-8pb7c" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.387961 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2b8366f4-8622-49dd-a0ab-d832fd34bccd-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-jpzts\" (UID: \"2b8366f4-8622-49dd-a0ab-d832fd34bccd\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-jpzts" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.388007 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/617cf016-593a-4d56-b104-e450cd6368ee-node-pullsecrets\") pod \"apiserver-76f77b778f-qq2ff\" (UID: \"617cf016-593a-4d56-b104-e450cd6368ee\") " pod="openshift-apiserver/apiserver-76f77b778f-qq2ff" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.388030 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7j4s5\" (UniqueName: \"kubernetes.io/projected/61feb33b-8fe3-4ab5-b3a2-df7db4172225-kube-api-access-7j4s5\") pod \"console-operator-58897d9998-dbzbp\" (UID: \"61feb33b-8fe3-4ab5-b3a2-df7db4172225\") " pod="openshift-console-operator/console-operator-58897d9998-dbzbp" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.388053 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/00792261-f23c-4fc8-a67b-4b7753b692a1-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-g7bns\" (UID: \"00792261-f23c-4fc8-a67b-4b7753b692a1\") " pod="openshift-authentication/oauth-openshift-558db77b4-g7bns" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.388074 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f0bed6a9-002b-40fc-89d5-031568b7e47c-config\") pod \"authentication-operator-69f744f599-5f9l6\" (UID: \"f0bed6a9-002b-40fc-89d5-031568b7e47c\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-5f9l6" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.388092 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/617cf016-593a-4d56-b104-e450cd6368ee-trusted-ca-bundle\") pod \"apiserver-76f77b778f-qq2ff\" (UID: \"617cf016-593a-4d56-b104-e450cd6368ee\") " pod="openshift-apiserver/apiserver-76f77b778f-qq2ff" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.388113 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/ad2115a5-1371-4a19-b1e8-7f93a7719a71-service-ca\") pod \"console-f9d7485db-8pb7c\" (UID: \"ad2115a5-1371-4a19-b1e8-7f93a7719a71\") " pod="openshift-console/console-f9d7485db-8pb7c" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.388159 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/617cf016-593a-4d56-b104-e450cd6368ee-audit\") pod \"apiserver-76f77b778f-qq2ff\" (UID: \"617cf016-593a-4d56-b104-e450cd6368ee\") " pod="openshift-apiserver/apiserver-76f77b778f-qq2ff" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.388183 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3c94527b-255d-4486-9a36-9fd0f7efd4d7-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-rslp7\" (UID: \"3c94527b-255d-4486-9a36-9fd0f7efd4d7\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-rslp7" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.388202 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2e786864-e8b1-4a03-9327-14d389a5bc21-config\") pod \"controller-manager-879f6c89f-mtf22\" (UID: \"2e786864-e8b1-4a03-9327-14d389a5bc21\") " pod="openshift-controller-manager/controller-manager-879f6c89f-mtf22" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.388222 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/617cf016-593a-4d56-b104-e450cd6368ee-etcd-client\") pod \"apiserver-76f77b778f-qq2ff\" (UID: \"617cf016-593a-4d56-b104-e450cd6368ee\") " pod="openshift-apiserver/apiserver-76f77b778f-qq2ff" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.388245 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/00792261-f23c-4fc8-a67b-4b7753b692a1-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-g7bns\" (UID: \"00792261-f23c-4fc8-a67b-4b7753b692a1\") " pod="openshift-authentication/oauth-openshift-558db77b4-g7bns" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.388268 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a509d814-6573-4bfd-92c7-8d15a2d47228-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-rgbkv\" (UID: \"a509d814-6573-4bfd-92c7-8d15a2d47228\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-rgbkv" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.388346 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/ad2115a5-1371-4a19-b1e8-7f93a7719a71-console-oauth-config\") pod \"console-f9d7485db-8pb7c\" (UID: \"ad2115a5-1371-4a19-b1e8-7f93a7719a71\") " 
pod="openshift-console/console-f9d7485db-8pb7c" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.388367 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/ad2115a5-1371-4a19-b1e8-7f93a7719a71-console-config\") pod \"console-f9d7485db-8pb7c\" (UID: \"ad2115a5-1371-4a19-b1e8-7f93a7719a71\") " pod="openshift-console/console-f9d7485db-8pb7c" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.388405 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/ddb1c1c4-6c20-4fab-ba30-4d74c96ad4f4-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-w8czv\" (UID: \"ddb1c1c4-6c20-4fab-ba30-4d74c96ad4f4\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-w8czv" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.388429 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/00792261-f23c-4fc8-a67b-4b7753b692a1-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-g7bns\" (UID: \"00792261-f23c-4fc8-a67b-4b7753b692a1\") " pod="openshift-authentication/oauth-openshift-558db77b4-g7bns" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.388450 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/ed4e7481-d39e-45b7-98e6-8dceac35361c-signing-cabundle\") pod \"service-ca-9c57cc56f-qkh5b\" (UID: \"ed4e7481-d39e-45b7-98e6-8dceac35361c\") " pod="openshift-service-ca/service-ca-9c57cc56f-qkh5b" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.388469 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/61feb33b-8fe3-4ab5-b3a2-df7db4172225-serving-cert\") pod \"console-operator-58897d9998-dbzbp\" (UID: \"61feb33b-8fe3-4ab5-b3a2-df7db4172225\") " pod="openshift-console-operator/console-operator-58897d9998-dbzbp" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.388558 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/617cf016-593a-4d56-b104-e450cd6368ee-etcd-serving-ca\") pod \"apiserver-76f77b778f-qq2ff\" (UID: \"617cf016-593a-4d56-b104-e450cd6368ee\") " pod="openshift-apiserver/apiserver-76f77b778f-qq2ff" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.388582 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9ab094b2-78dc-4ee6-b563-a1ae064588cf-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-gwnkp\" (UID: \"9ab094b2-78dc-4ee6-b563-a1ae064588cf\") " pod="openshift-marketplace/marketplace-operator-79b997595-gwnkp" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.388605 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/00792261-f23c-4fc8-a67b-4b7753b692a1-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-g7bns\" (UID: \"00792261-f23c-4fc8-a67b-4b7753b692a1\") " pod="openshift-authentication/oauth-openshift-558db77b4-g7bns" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.388627 4921 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-ws6sh\" (UniqueName: \"kubernetes.io/projected/617cf016-593a-4d56-b104-e450cd6368ee-kube-api-access-ws6sh\") pod \"apiserver-76f77b778f-qq2ff\" (UID: \"617cf016-593a-4d56-b104-e450cd6368ee\") " pod="openshift-apiserver/apiserver-76f77b778f-qq2ff" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.388659 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/00792261-f23c-4fc8-a67b-4b7753b692a1-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-g7bns\" (UID: \"00792261-f23c-4fc8-a67b-4b7753b692a1\") " pod="openshift-authentication/oauth-openshift-558db77b4-g7bns" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.388681 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/ed4e7481-d39e-45b7-98e6-8dceac35361c-signing-key\") pod \"service-ca-9c57cc56f-qkh5b\" (UID: \"ed4e7481-d39e-45b7-98e6-8dceac35361c\") " pod="openshift-service-ca/service-ca-9c57cc56f-qkh5b" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.388705 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-skdpq\" (UniqueName: \"kubernetes.io/projected/2b8366f4-8622-49dd-a0ab-d832fd34bccd-kube-api-access-skdpq\") pod \"openshift-apiserver-operator-796bbdcf4f-jpzts\" (UID: \"2b8366f4-8622-49dd-a0ab-d832fd34bccd\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-jpzts" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.388738 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/00792261-f23c-4fc8-a67b-4b7753b692a1-audit-policies\") pod \"oauth-openshift-558db77b4-g7bns\" (UID: \"00792261-f23c-4fc8-a67b-4b7753b692a1\") " pod="openshift-authentication/oauth-openshift-558db77b4-g7bns" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.388769 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9fks2\" (UniqueName: \"kubernetes.io/projected/00792261-f23c-4fc8-a67b-4b7753b692a1-kube-api-access-9fks2\") pod \"oauth-openshift-558db77b4-g7bns\" (UID: \"00792261-f23c-4fc8-a67b-4b7753b692a1\") " pod="openshift-authentication/oauth-openshift-558db77b4-g7bns" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.388792 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/76a2b163-6490-4d77-947f-c7333cb25129-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-6b7sj\" (UID: \"76a2b163-6490-4d77-947f-c7333cb25129\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-6b7sj" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.388814 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1172878a-36b0-44ca-9df0-7eff88ebadfd-serving-cert\") pod \"service-ca-operator-777779d784-nddsp\" (UID: \"1172878a-36b0-44ca-9df0-7eff88ebadfd\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-nddsp" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.391977 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: 
\"kubernetes.io/configmap/959e5098-562a-471a-9396-fed74ed113b5-config-volume\") pod \"collect-profiles-29422845-kdjmt\" (UID: \"959e5098-562a-471a-9396-fed74ed113b5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29422845-kdjmt" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.396513 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.397273 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/00792261-f23c-4fc8-a67b-4b7753b692a1-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-g7bns\" (UID: \"00792261-f23c-4fc8-a67b-4b7753b692a1\") " pod="openshift-authentication/oauth-openshift-558db77b4-g7bns" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.397353 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/5b553e21-002b-4905-8f32-6950d71db324-auth-proxy-config\") pod \"machine-config-operator-74547568cd-bx4bn\" (UID: \"5b553e21-002b-4905-8f32-6950d71db324\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-bx4bn" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.397658 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-bbgxs"] Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.397623 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/617cf016-593a-4d56-b104-e450cd6368ee-image-import-ca\") pod \"apiserver-76f77b778f-qq2ff\" (UID: \"617cf016-593a-4d56-b104-e450cd6368ee\") " pod="openshift-apiserver/apiserver-76f77b778f-qq2ff" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.399875 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/61feb33b-8fe3-4ab5-b3a2-df7db4172225-config\") pod \"console-operator-58897d9998-dbzbp\" (UID: \"61feb33b-8fe3-4ab5-b3a2-df7db4172225\") " pod="openshift-console-operator/console-operator-58897d9998-dbzbp" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.400444 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-556st"] Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.400509 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-qkh5b"] Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.400666 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/617cf016-593a-4d56-b104-e450cd6368ee-serving-cert\") pod \"apiserver-76f77b778f-qq2ff\" (UID: \"617cf016-593a-4d56-b104-e450cd6368ee\") " pod="openshift-apiserver/apiserver-76f77b778f-qq2ff" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.400903 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-m9sh2" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.401095 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f0bed6a9-002b-40fc-89d5-031568b7e47c-service-ca-bundle\") pod \"authentication-operator-69f744f599-5f9l6\" (UID: \"f0bed6a9-002b-40fc-89d5-031568b7e47c\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-5f9l6" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.401924 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/00792261-f23c-4fc8-a67b-4b7753b692a1-audit-dir\") pod \"oauth-openshift-558db77b4-g7bns\" (UID: \"00792261-f23c-4fc8-a67b-4b7753b692a1\") " pod="openshift-authentication/oauth-openshift-558db77b4-g7bns" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.402295 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3f1deb2d-0d02-4085-8aba-d05024240e34-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-cgqtr\" (UID: \"3f1deb2d-0d02-4085-8aba-d05024240e34\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-cgqtr" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.402697 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/2e786864-e8b1-4a03-9327-14d389a5bc21-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-mtf22\" (UID: \"2e786864-e8b1-4a03-9327-14d389a5bc21\") " pod="openshift-controller-manager/controller-manager-879f6c89f-mtf22" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.403380 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2e786864-e8b1-4a03-9327-14d389a5bc21-serving-cert\") pod \"controller-manager-879f6c89f-mtf22\" (UID: \"2e786864-e8b1-4a03-9327-14d389a5bc21\") " pod="openshift-controller-manager/controller-manager-879f6c89f-mtf22" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.403718 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9ab094b2-78dc-4ee6-b563-a1ae064588cf-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-gwnkp\" (UID: \"9ab094b2-78dc-4ee6-b563-a1ae064588cf\") " pod="openshift-marketplace/marketplace-operator-79b997595-gwnkp" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.403958 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f0bed6a9-002b-40fc-89d5-031568b7e47c-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-5f9l6\" (UID: \"f0bed6a9-002b-40fc-89d5-031568b7e47c\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-5f9l6" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.404079 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/00792261-f23c-4fc8-a67b-4b7753b692a1-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-g7bns\" (UID: \"00792261-f23c-4fc8-a67b-4b7753b692a1\") " pod="openshift-authentication/oauth-openshift-558db77b4-g7bns" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.404622 4921 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/03c59650-42ea-4995-ada6-f86eb8aed1de-client-ca\") pod \"route-controller-manager-6576b87f9c-55m5g\" (UID: \"03c59650-42ea-4995-ada6-f86eb8aed1de\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-55m5g" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.404697 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/03c59650-42ea-4995-ada6-f86eb8aed1de-config\") pod \"route-controller-manager-6576b87f9c-55m5g\" (UID: \"03c59650-42ea-4995-ada6-f86eb8aed1de\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-55m5g" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.404924 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ad2115a5-1371-4a19-b1e8-7f93a7719a71-trusted-ca-bundle\") pod \"console-f9d7485db-8pb7c\" (UID: \"ad2115a5-1371-4a19-b1e8-7f93a7719a71\") " pod="openshift-console/console-f9d7485db-8pb7c" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.405356 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/959e5098-562a-471a-9396-fed74ed113b5-secret-volume\") pod \"collect-profiles-29422845-kdjmt\" (UID: \"959e5098-562a-471a-9396-fed74ed113b5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29422845-kdjmt" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.405904 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/00792261-f23c-4fc8-a67b-4b7753b692a1-audit-policies\") pod \"oauth-openshift-558db77b4-g7bns\" (UID: \"00792261-f23c-4fc8-a67b-4b7753b692a1\") " pod="openshift-authentication/oauth-openshift-558db77b4-g7bns" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.406174 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/00792261-f23c-4fc8-a67b-4b7753b692a1-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-g7bns\" (UID: \"00792261-f23c-4fc8-a67b-4b7753b692a1\") " pod="openshift-authentication/oauth-openshift-558db77b4-g7bns" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.406300 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/5b553e21-002b-4905-8f32-6950d71db324-images\") pod \"machine-config-operator-74547568cd-bx4bn\" (UID: \"5b553e21-002b-4905-8f32-6950d71db324\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-bx4bn" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.407109 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/2e786864-e8b1-4a03-9327-14d389a5bc21-client-ca\") pod \"controller-manager-879f6c89f-mtf22\" (UID: \"2e786864-e8b1-4a03-9327-14d389a5bc21\") " pod="openshift-controller-manager/controller-manager-879f6c89f-mtf22" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.406321 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2b8366f4-8622-49dd-a0ab-d832fd34bccd-config\") pod \"openshift-apiserver-operator-796bbdcf4f-jpzts\" (UID: \"2b8366f4-8622-49dd-a0ab-d832fd34bccd\") " 
pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-jpzts" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.407718 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/617cf016-593a-4d56-b104-e450cd6368ee-audit-dir\") pod \"apiserver-76f77b778f-qq2ff\" (UID: \"617cf016-593a-4d56-b104-e450cd6368ee\") " pod="openshift-apiserver/apiserver-76f77b778f-qq2ff" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.407775 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/ad2115a5-1371-4a19-b1e8-7f93a7719a71-console-serving-cert\") pod \"console-f9d7485db-8pb7c\" (UID: \"ad2115a5-1371-4a19-b1e8-7f93a7719a71\") " pod="openshift-console/console-f9d7485db-8pb7c" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.407882 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/76a2b163-6490-4d77-947f-c7333cb25129-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-6b7sj\" (UID: \"76a2b163-6490-4d77-947f-c7333cb25129\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-6b7sj" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.408025 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-8pb7c"] Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.408289 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f0bed6a9-002b-40fc-89d5-031568b7e47c-serving-cert\") pod \"authentication-operator-69f744f599-5f9l6\" (UID: \"f0bed6a9-002b-40fc-89d5-031568b7e47c\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-5f9l6" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.408815 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/ad2115a5-1371-4a19-b1e8-7f93a7719a71-oauth-serving-cert\") pod \"console-f9d7485db-8pb7c\" (UID: \"ad2115a5-1371-4a19-b1e8-7f93a7719a71\") " pod="openshift-console/console-f9d7485db-8pb7c" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.409732 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/61feb33b-8fe3-4ab5-b3a2-df7db4172225-trusted-ca\") pod \"console-operator-58897d9998-dbzbp\" (UID: \"61feb33b-8fe3-4ab5-b3a2-df7db4172225\") " pod="openshift-console-operator/console-operator-58897d9998-dbzbp" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.410880 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3f1deb2d-0d02-4085-8aba-d05024240e34-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-cgqtr\" (UID: \"3f1deb2d-0d02-4085-8aba-d05024240e34\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-cgqtr" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.412235 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/617cf016-593a-4d56-b104-e450cd6368ee-node-pullsecrets\") pod \"apiserver-76f77b778f-qq2ff\" (UID: \"617cf016-593a-4d56-b104-e450cd6368ee\") " pod="openshift-apiserver/apiserver-76f77b778f-qq2ff" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 
12:58:14.412257 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2e786864-e8b1-4a03-9327-14d389a5bc21-config\") pod \"controller-manager-879f6c89f-mtf22\" (UID: \"2e786864-e8b1-4a03-9327-14d389a5bc21\") " pod="openshift-controller-manager/controller-manager-879f6c89f-mtf22" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.412408 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/ad2115a5-1371-4a19-b1e8-7f93a7719a71-console-config\") pod \"console-f9d7485db-8pb7c\" (UID: \"ad2115a5-1371-4a19-b1e8-7f93a7719a71\") " pod="openshift-console/console-f9d7485db-8pb7c" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.412902 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/617cf016-593a-4d56-b104-e450cd6368ee-config\") pod \"apiserver-76f77b778f-qq2ff\" (UID: \"617cf016-593a-4d56-b104-e450cd6368ee\") " pod="openshift-apiserver/apiserver-76f77b778f-qq2ff" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.412979 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/00792261-f23c-4fc8-a67b-4b7753b692a1-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-g7bns\" (UID: \"00792261-f23c-4fc8-a67b-4b7753b692a1\") " pod="openshift-authentication/oauth-openshift-558db77b4-g7bns" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.413768 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/ad2115a5-1371-4a19-b1e8-7f93a7719a71-service-ca\") pod \"console-f9d7485db-8pb7c\" (UID: \"ad2115a5-1371-4a19-b1e8-7f93a7719a71\") " pod="openshift-console/console-f9d7485db-8pb7c" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.413981 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f0bed6a9-002b-40fc-89d5-031568b7e47c-config\") pod \"authentication-operator-69f744f599-5f9l6\" (UID: \"f0bed6a9-002b-40fc-89d5-031568b7e47c\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-5f9l6" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.414190 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/617cf016-593a-4d56-b104-e450cd6368ee-trusted-ca-bundle\") pod \"apiserver-76f77b778f-qq2ff\" (UID: \"617cf016-593a-4d56-b104-e450cd6368ee\") " pod="openshift-apiserver/apiserver-76f77b778f-qq2ff" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.414582 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/ed4e7481-d39e-45b7-98e6-8dceac35361c-signing-cabundle\") pod \"service-ca-9c57cc56f-qkh5b\" (UID: \"ed4e7481-d39e-45b7-98e6-8dceac35361c\") " pod="openshift-service-ca/service-ca-9c57cc56f-qkh5b" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.414977 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/617cf016-593a-4d56-b104-e450cd6368ee-audit\") pod \"apiserver-76f77b778f-qq2ff\" (UID: \"617cf016-593a-4d56-b104-e450cd6368ee\") " pod="openshift-apiserver/apiserver-76f77b778f-qq2ff" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.415096 4921 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/617cf016-593a-4d56-b104-e450cd6368ee-etcd-serving-ca\") pod \"apiserver-76f77b778f-qq2ff\" (UID: \"617cf016-593a-4d56-b104-e450cd6368ee\") " pod="openshift-apiserver/apiserver-76f77b778f-qq2ff" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.415455 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/00792261-f23c-4fc8-a67b-4b7753b692a1-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-g7bns\" (UID: \"00792261-f23c-4fc8-a67b-4b7753b692a1\") " pod="openshift-authentication/oauth-openshift-558db77b4-g7bns" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.415769 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/ed4e7481-d39e-45b7-98e6-8dceac35361c-signing-key\") pod \"service-ca-9c57cc56f-qkh5b\" (UID: \"ed4e7481-d39e-45b7-98e6-8dceac35361c\") " pod="openshift-service-ca/service-ca-9c57cc56f-qkh5b" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.416127 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/617cf016-593a-4d56-b104-e450cd6368ee-encryption-config\") pod \"apiserver-76f77b778f-qq2ff\" (UID: \"617cf016-593a-4d56-b104-e450cd6368ee\") " pod="openshift-apiserver/apiserver-76f77b778f-qq2ff" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.416138 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/5b553e21-002b-4905-8f32-6950d71db324-proxy-tls\") pod \"machine-config-operator-74547568cd-bx4bn\" (UID: \"5b553e21-002b-4905-8f32-6950d71db324\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-bx4bn" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.416941 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.416963 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/00792261-f23c-4fc8-a67b-4b7753b692a1-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-g7bns\" (UID: \"00792261-f23c-4fc8-a67b-4b7753b692a1\") " pod="openshift-authentication/oauth-openshift-558db77b4-g7bns" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.417167 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/00792261-f23c-4fc8-a67b-4b7753b692a1-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-g7bns\" (UID: \"00792261-f23c-4fc8-a67b-4b7753b692a1\") " pod="openshift-authentication/oauth-openshift-558db77b4-g7bns" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.417124 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-g7bns"] Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.417660 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/03c59650-42ea-4995-ada6-f86eb8aed1de-serving-cert\") pod \"route-controller-manager-6576b87f9c-55m5g\" (UID: 
\"03c59650-42ea-4995-ada6-f86eb8aed1de\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-55m5g" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.418295 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/00792261-f23c-4fc8-a67b-4b7753b692a1-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-g7bns\" (UID: \"00792261-f23c-4fc8-a67b-4b7753b692a1\") " pod="openshift-authentication/oauth-openshift-558db77b4-g7bns" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.418808 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/9ab094b2-78dc-4ee6-b563-a1ae064588cf-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-gwnkp\" (UID: \"9ab094b2-78dc-4ee6-b563-a1ae064588cf\") " pod="openshift-marketplace/marketplace-operator-79b997595-gwnkp" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.419834 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jmkzr"] Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.419844 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/76a2b163-6490-4d77-947f-c7333cb25129-proxy-tls\") pod \"machine-config-controller-84d6567774-6b7sj\" (UID: \"76a2b163-6490-4d77-947f-c7333cb25129\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-6b7sj" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.421979 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-cgqtr"] Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.423765 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-7smdt"] Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.425024 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-kwm5b"] Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.426333 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-mtf22"] Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.427976 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/00792261-f23c-4fc8-a67b-4b7753b692a1-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-g7bns\" (UID: \"00792261-f23c-4fc8-a67b-4b7753b692a1\") " pod="openshift-authentication/oauth-openshift-558db77b4-g7bns" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.428156 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/ad2115a5-1371-4a19-b1e8-7f93a7719a71-console-oauth-config\") pod \"console-f9d7485db-8pb7c\" (UID: \"ad2115a5-1371-4a19-b1e8-7f93a7719a71\") " pod="openshift-console/console-f9d7485db-8pb7c" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.428227 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-sjkkc"] Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.428647 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" 
(UniqueName: \"kubernetes.io/secret/ddb1c1c4-6c20-4fab-ba30-4d74c96ad4f4-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-w8czv\" (UID: \"ddb1c1c4-6c20-4fab-ba30-4d74c96ad4f4\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-w8czv" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.429240 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/617cf016-593a-4d56-b104-e450cd6368ee-etcd-client\") pod \"apiserver-76f77b778f-qq2ff\" (UID: \"617cf016-593a-4d56-b104-e450cd6368ee\") " pod="openshift-apiserver/apiserver-76f77b778f-qq2ff" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.430135 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2b8366f4-8622-49dd-a0ab-d832fd34bccd-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-jpzts\" (UID: \"2b8366f4-8622-49dd-a0ab-d832fd34bccd\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-jpzts" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.430303 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/00792261-f23c-4fc8-a67b-4b7753b692a1-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-g7bns\" (UID: \"00792261-f23c-4fc8-a67b-4b7753b692a1\") " pod="openshift-authentication/oauth-openshift-558db77b4-g7bns" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.431489 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/61feb33b-8fe3-4ab5-b3a2-df7db4172225-serving-cert\") pod \"console-operator-58897d9998-dbzbp\" (UID: \"61feb33b-8fe3-4ab5-b3a2-df7db4172225\") " pod="openshift-console-operator/console-operator-58897d9998-dbzbp" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.431592 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.432666 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-kz2qx"] Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.433598 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/00792261-f23c-4fc8-a67b-4b7753b692a1-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-g7bns\" (UID: \"00792261-f23c-4fc8-a67b-4b7753b692a1\") " pod="openshift-authentication/oauth-openshift-558db77b4-g7bns" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.435809 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-kxlr2"] Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.438079 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-rslp7"] Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.439726 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-78cdm"] Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.441062 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-config-operator/openshift-config-operator-7777fb866f-dhxg9"] Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.443113 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-zkglp"] Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.445021 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-7hhsc"] Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.446480 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-5lrds"] Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.447486 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-m9sh2"] Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.448623 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-sh5l7"] Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.452092 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.471789 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.490825 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b3ad786b-c5ca-47ba-8188-680f6c13192b-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-jmkzr\" (UID: \"b3ad786b-c5ca-47ba-8188-680f6c13192b\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jmkzr" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.491600 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-glfrn\" (UniqueName: \"kubernetes.io/projected/23dc36ea-02d4-493e-863b-56126c624ab0-kube-api-access-glfrn\") pod \"downloads-7954f5f757-7smdt\" (UID: \"23dc36ea-02d4-493e-863b-56126c624ab0\") " pod="openshift-console/downloads-7954f5f757-7smdt" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.491780 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.491912 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3c94527b-255d-4486-9a36-9fd0f7efd4d7-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-rslp7\" (UID: \"3c94527b-255d-4486-9a36-9fd0f7efd4d7\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-rslp7" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.492036 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a509d814-6573-4bfd-92c7-8d15a2d47228-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-rgbkv\" (UID: \"a509d814-6573-4bfd-92c7-8d15a2d47228\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-rgbkv" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.492208 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/1172878a-36b0-44ca-9df0-7eff88ebadfd-serving-cert\") pod \"service-ca-operator-777779d784-nddsp\" (UID: \"1172878a-36b0-44ca-9df0-7eff88ebadfd\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-nddsp" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.492457 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3c94527b-255d-4486-9a36-9fd0f7efd4d7-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-rslp7\" (UID: \"3c94527b-255d-4486-9a36-9fd0f7efd4d7\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-rslp7" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.493195 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6x4f9\" (UniqueName: \"kubernetes.io/projected/3c94527b-255d-4486-9a36-9fd0f7efd4d7-kube-api-access-6x4f9\") pod \"kube-storage-version-migrator-operator-b67b599dd-rslp7\" (UID: \"3c94527b-255d-4486-9a36-9fd0f7efd4d7\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-rslp7" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.493342 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mm7cr\" (UniqueName: \"kubernetes.io/projected/a509d814-6573-4bfd-92c7-8d15a2d47228-kube-api-access-mm7cr\") pod \"cluster-samples-operator-665b6dd947-rgbkv\" (UID: \"a509d814-6573-4bfd-92c7-8d15a2d47228\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-rgbkv" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.493463 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b3ad786b-c5ca-47ba-8188-680f6c13192b-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-jmkzr\" (UID: \"b3ad786b-c5ca-47ba-8188-680f6c13192b\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jmkzr" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.493767 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/abc0551e-9e8b-4466-9eea-1da60ead65bb-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-kwm5b\" (UID: \"abc0551e-9e8b-4466-9eea-1da60ead65bb\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-kwm5b" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.493792 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3c94527b-255d-4486-9a36-9fd0f7efd4d7-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-rslp7\" (UID: \"3c94527b-255d-4486-9a36-9fd0f7efd4d7\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-rslp7" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.494097 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1172878a-36b0-44ca-9df0-7eff88ebadfd-config\") pod \"service-ca-operator-777779d784-nddsp\" (UID: \"1172878a-36b0-44ca-9df0-7eff88ebadfd\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-nddsp" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.494260 4921 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-6zx2b\" (UniqueName: \"kubernetes.io/projected/1172878a-36b0-44ca-9df0-7eff88ebadfd-kube-api-access-6zx2b\") pod \"service-ca-operator-777779d784-nddsp\" (UID: \"1172878a-36b0-44ca-9df0-7eff88ebadfd\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-nddsp" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.494365 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b3ad786b-c5ca-47ba-8188-680f6c13192b-config\") pod \"kube-apiserver-operator-766d6c64bb-jmkzr\" (UID: \"b3ad786b-c5ca-47ba-8188-680f6c13192b\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jmkzr" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.494512 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xc6b8\" (UniqueName: \"kubernetes.io/projected/abc0551e-9e8b-4466-9eea-1da60ead65bb-kube-api-access-xc6b8\") pod \"package-server-manager-789f6589d5-kwm5b\" (UID: \"abc0551e-9e8b-4466-9eea-1da60ead65bb\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-kwm5b" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.496036 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3c94527b-255d-4486-9a36-9fd0f7efd4d7-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-rslp7\" (UID: \"3c94527b-255d-4486-9a36-9fd0f7efd4d7\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-rslp7" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.512499 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.518108 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/abc0551e-9e8b-4466-9eea-1da60ead65bb-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-kwm5b\" (UID: \"abc0551e-9e8b-4466-9eea-1da60ead65bb\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-kwm5b" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.531595 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.552146 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.571826 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.592806 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.611871 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.631619 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Dec 10 
12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.651320 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.656408 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1172878a-36b0-44ca-9df0-7eff88ebadfd-serving-cert\") pod \"service-ca-operator-777779d784-nddsp\" (UID: \"1172878a-36b0-44ca-9df0-7eff88ebadfd\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-nddsp" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.671718 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.675001 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1172878a-36b0-44ca-9df0-7eff88ebadfd-config\") pod \"service-ca-operator-777779d784-nddsp\" (UID: \"1172878a-36b0-44ca-9df0-7eff88ebadfd\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-nddsp" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.691226 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.711861 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.732642 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.751928 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.772369 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.802418 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.812783 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.831988 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.852823 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.872564 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.890982 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.912796 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.931367 4921 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.953196 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.972827 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.987919 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a509d814-6573-4bfd-92c7-8d15a2d47228-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-rgbkv\" (UID: \"a509d814-6573-4bfd-92c7-8d15a2d47228\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-rgbkv" Dec 10 12:58:14 crc kubenswrapper[4921]: I1210 12:58:14.992893 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Dec 10 12:58:15 crc kubenswrapper[4921]: I1210 12:58:15.014759 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Dec 10 12:58:15 crc kubenswrapper[4921]: I1210 12:58:15.032244 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Dec 10 12:58:15 crc kubenswrapper[4921]: I1210 12:58:15.051980 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Dec 10 12:58:15 crc kubenswrapper[4921]: I1210 12:58:15.067072 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b3ad786b-c5ca-47ba-8188-680f6c13192b-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-jmkzr\" (UID: \"b3ad786b-c5ca-47ba-8188-680f6c13192b\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jmkzr" Dec 10 12:58:15 crc kubenswrapper[4921]: I1210 12:58:15.072315 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Dec 10 12:58:15 crc kubenswrapper[4921]: I1210 12:58:15.075956 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b3ad786b-c5ca-47ba-8188-680f6c13192b-config\") pod \"kube-apiserver-operator-766d6c64bb-jmkzr\" (UID: \"b3ad786b-c5ca-47ba-8188-680f6c13192b\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jmkzr" Dec 10 12:58:15 crc kubenswrapper[4921]: I1210 12:58:15.132422 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Dec 10 12:58:15 crc kubenswrapper[4921]: I1210 12:58:15.140925 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gcswl\" (UniqueName: \"kubernetes.io/projected/9c7fe3ab-dc6a-44fb-9c30-070f46cfd0f4-kube-api-access-gcswl\") pod \"machine-api-operator-5694c8668f-jwb9f\" (UID: \"9c7fe3ab-dc6a-44fb-9c30-070f46cfd0f4\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-jwb9f" Dec 10 12:58:15 crc kubenswrapper[4921]: I1210 12:58:15.154216 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Dec 10 12:58:15 
crc kubenswrapper[4921]: I1210 12:58:15.191350 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5bscb\" (UniqueName: \"kubernetes.io/projected/c265a212-7c72-44e8-9e5e-cfef9fabdbfe-kube-api-access-5bscb\") pod \"apiserver-7bbb656c7d-d8m4j\" (UID: \"c265a212-7c72-44e8-9e5e-cfef9fabdbfe\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d8m4j" Dec 10 12:58:15 crc kubenswrapper[4921]: I1210 12:58:15.191858 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Dec 10 12:58:15 crc kubenswrapper[4921]: I1210 12:58:15.212547 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Dec 10 12:58:15 crc kubenswrapper[4921]: I1210 12:58:15.232685 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Dec 10 12:58:15 crc kubenswrapper[4921]: I1210 12:58:15.251569 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Dec 10 12:58:15 crc kubenswrapper[4921]: I1210 12:58:15.266999 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d8m4j" Dec 10 12:58:15 crc kubenswrapper[4921]: I1210 12:58:15.277751 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Dec 10 12:58:15 crc kubenswrapper[4921]: I1210 12:58:15.290445 4921 request.go:700] Waited for 1.001476643s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-config-operator/configmaps?fieldSelector=metadata.name%3Dkube-root-ca.crt&limit=500&resourceVersion=0 Dec 10 12:58:15 crc kubenswrapper[4921]: I1210 12:58:15.293370 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Dec 10 12:58:15 crc kubenswrapper[4921]: I1210 12:58:15.296484 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-jwb9f" Dec 10 12:58:15 crc kubenswrapper[4921]: I1210 12:58:15.312447 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Dec 10 12:58:15 crc kubenswrapper[4921]: I1210 12:58:15.336516 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Dec 10 12:58:15 crc kubenswrapper[4921]: I1210 12:58:15.352032 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Dec 10 12:58:15 crc kubenswrapper[4921]: I1210 12:58:15.372700 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Dec 10 12:58:15 crc kubenswrapper[4921]: I1210 12:58:15.392543 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Dec 10 12:58:15 crc kubenswrapper[4921]: I1210 12:58:15.413647 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Dec 10 12:58:15 crc kubenswrapper[4921]: I1210 12:58:15.433455 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Dec 10 12:58:15 crc kubenswrapper[4921]: I1210 12:58:15.451090 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Dec 10 12:58:15 crc kubenswrapper[4921]: I1210 12:58:15.471377 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Dec 10 12:58:15 crc kubenswrapper[4921]: I1210 12:58:15.512122 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Dec 10 12:58:15 crc kubenswrapper[4921]: I1210 12:58:15.532459 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Dec 10 12:58:15 crc kubenswrapper[4921]: I1210 12:58:15.552258 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Dec 10 12:58:15 crc kubenswrapper[4921]: I1210 12:58:15.555444 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-d8m4j"] Dec 10 12:58:15 crc kubenswrapper[4921]: I1210 12:58:15.559362 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-jwb9f"] Dec 10 12:58:15 crc kubenswrapper[4921]: W1210 12:58:15.565714 4921 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9c7fe3ab_dc6a_44fb_9c30_070f46cfd0f4.slice/crio-2e6b66ae3d927b3fb0636b8ce3b6f27caaccfb75b15021598b5bf27b455664c4 WatchSource:0}: Error finding container 2e6b66ae3d927b3fb0636b8ce3b6f27caaccfb75b15021598b5bf27b455664c4: Status 404 returned error can't find the container with id 2e6b66ae3d927b3fb0636b8ce3b6f27caaccfb75b15021598b5bf27b455664c4 Dec 10 12:58:15 crc kubenswrapper[4921]: I1210 12:58:15.570820 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" 
Dec 10 12:58:15 crc kubenswrapper[4921]: I1210 12:58:15.591878 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default"
Dec 10 12:58:15 crc kubenswrapper[4921]: I1210 12:58:15.610993 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw"
Dec 10 12:58:15 crc kubenswrapper[4921]: I1210 12:58:15.631800 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert"
Dec 10 12:58:15 crc kubenswrapper[4921]: I1210 12:58:15.651458 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default"
Dec 10 12:58:15 crc kubenswrapper[4921]: I1210 12:58:15.670820 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config"
Dec 10 12:58:15 crc kubenswrapper[4921]: I1210 12:58:15.691272 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default"
Dec 10 12:58:15 crc kubenswrapper[4921]: I1210 12:58:15.711888 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5"
Dec 10 12:58:15 crc kubenswrapper[4921]: I1210 12:58:15.731082 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle"
Dec 10 12:58:15 crc kubenswrapper[4921]: I1210 12:58:15.753003 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt"
Dec 10 12:58:15 crc kubenswrapper[4921]: I1210 12:58:15.771717 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls"
Dec 10 12:58:15 crc kubenswrapper[4921]: I1210 12:58:15.790577 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-jwb9f" event={"ID":"9c7fe3ab-dc6a-44fb-9c30-070f46cfd0f4","Type":"ContainerStarted","Data":"bef220fa7ca8af5af253d62cfdb67fa4ac321d4ed8265d584cfd8f9148a5497c"}
Dec 10 12:58:15 crc kubenswrapper[4921]: I1210 12:58:15.790618 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-jwb9f" event={"ID":"9c7fe3ab-dc6a-44fb-9c30-070f46cfd0f4","Type":"ContainerStarted","Data":"b590955074663716376ed4ee8aee9669cffd66ad46abd6dd71d7da3c5b6d2df6"}
Dec 10 12:58:15 crc kubenswrapper[4921]: I1210 12:58:15.790628 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-jwb9f" event={"ID":"9c7fe3ab-dc6a-44fb-9c30-070f46cfd0f4","Type":"ContainerStarted","Data":"2e6b66ae3d927b3fb0636b8ce3b6f27caaccfb75b15021598b5bf27b455664c4"}
Dec 10 12:58:15 crc kubenswrapper[4921]: I1210 12:58:15.791231 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt"
Dec 10 12:58:15 crc kubenswrapper[4921]: I1210 12:58:15.792260 4921 generic.go:334] "Generic (PLEG): container finished" podID="c265a212-7c72-44e8-9e5e-cfef9fabdbfe" containerID="ee9b47b79854e439fdf0a17fbc3c4a86528bcf90ef05807d24b8181e449f4d77" exitCode=0
Dec 10 12:58:15 crc kubenswrapper[4921]: I1210 12:58:15.792293 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d8m4j" event={"ID":"c265a212-7c72-44e8-9e5e-cfef9fabdbfe","Type":"ContainerDied","Data":"ee9b47b79854e439fdf0a17fbc3c4a86528bcf90ef05807d24b8181e449f4d77"}
Dec 10 12:58:15 crc kubenswrapper[4921]: I1210 12:58:15.792323 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d8m4j" event={"ID":"c265a212-7c72-44e8-9e5e-cfef9fabdbfe","Type":"ContainerStarted","Data":"ca8fa4012ec9e95f30ed4eb1c1b1af71dafd01f00187613589fef20af86db97b"}
Dec 10 12:58:15 crc kubenswrapper[4921]: I1210 12:58:15.811981 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt"
Dec 10 12:58:15 crc kubenswrapper[4921]: I1210 12:58:15.831686 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn"
Dec 10 12:58:15 crc kubenswrapper[4921]: I1210 12:58:15.851047 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert"
Dec 10 12:58:15 crc kubenswrapper[4921]: I1210 12:58:15.871730 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client"
Dec 10 12:58:15 crc kubenswrapper[4921]: I1210 12:58:15.891958 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config"
Dec 10 12:58:15 crc kubenswrapper[4921]: I1210 12:58:15.911427 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle"
Dec 10 12:58:15 crc kubenswrapper[4921]: I1210 12:58:15.932793 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle"
Dec 10 12:58:15 crc kubenswrapper[4921]: I1210 12:58:15.951020 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt"
Dec 10 12:58:15 crc kubenswrapper[4921]: I1210 12:58:15.985556 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca"
Dec 10 12:58:15 crc kubenswrapper[4921]: I1210 12:58:15.993569 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls"
Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.010994 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk"
Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.031533 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt"
Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.051081 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt"
Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.072252 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt"
Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.091334 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert"
Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.112108 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt"
Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.130607 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx"
Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.151050 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default"
Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.171814 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls"
Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.191537 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh"
Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.212448 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls"
Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.231979 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token"
Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.252210 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd"
Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.294485 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tcs8n\" (UniqueName: \"kubernetes.io/projected/ed4e7481-d39e-45b7-98e6-8dceac35361c-kube-api-access-tcs8n\") pod \"service-ca-9c57cc56f-qkh5b\" (UID: \"ed4e7481-d39e-45b7-98e6-8dceac35361c\") " pod="openshift-service-ca/service-ca-9c57cc56f-qkh5b"
Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.310452 4921 request.go:700] Waited for 1.91206944s due to client-side throttling, not priority and fairness, request: POST:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-operator-lifecycle-manager/serviceaccounts/collect-profiles/token
Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.310791 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k94kf\" (UniqueName: \"kubernetes.io/projected/ddb1c1c4-6c20-4fab-ba30-4d74c96ad4f4-kube-api-access-k94kf\") pod \"control-plane-machine-set-operator-78cbb6b69f-w8czv\" (UID: \"ddb1c1c4-6c20-4fab-ba30-4d74c96ad4f4\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-w8czv"
Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.331946 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt"
Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.333115 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kp5br\" (UniqueName: \"kubernetes.io/projected/959e5098-562a-471a-9396-fed74ed113b5-kube-api-access-kp5br\") pod \"collect-profiles-29422845-kdjmt\" (UID: \"959e5098-562a-471a-9396-fed74ed113b5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29422845-kdjmt"
Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.352216 4921 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k"
Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.371966 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt"
Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.412533 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-csv6g\" (UniqueName: \"kubernetes.io/projected/03c59650-42ea-4995-ada6-f86eb8aed1de-kube-api-access-csv6g\") pod \"route-controller-manager-6576b87f9c-55m5g\" (UID: \"03c59650-42ea-4995-ada6-f86eb8aed1de\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-55m5g"
Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.431852 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ws6sh\" (UniqueName: \"kubernetes.io/projected/617cf016-593a-4d56-b104-e450cd6368ee-kube-api-access-ws6sh\") pod \"apiserver-76f77b778f-qq2ff\" (UID: \"617cf016-593a-4d56-b104-e450cd6368ee\") " pod="openshift-apiserver/apiserver-76f77b778f-qq2ff"
Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.459776 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2hmv5\" (UniqueName: \"kubernetes.io/projected/5b553e21-002b-4905-8f32-6950d71db324-kube-api-access-2hmv5\") pod \"machine-config-operator-74547568cd-bx4bn\" (UID: \"5b553e21-002b-4905-8f32-6950d71db324\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-bx4bn"
Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.465957 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kr26f\" (UniqueName: \"kubernetes.io/projected/9ab094b2-78dc-4ee6-b563-a1ae064588cf-kube-api-access-kr26f\") pod \"marketplace-operator-79b997595-gwnkp\" (UID: \"9ab094b2-78dc-4ee6-b563-a1ae064588cf\") " pod="openshift-marketplace/marketplace-operator-79b997595-gwnkp"
Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.479123 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-qq2ff"
Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.489267 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-skdpq\" (UniqueName: \"kubernetes.io/projected/2b8366f4-8622-49dd-a0ab-d832fd34bccd-kube-api-access-skdpq\") pod \"openshift-apiserver-operator-796bbdcf4f-jpzts\" (UID: \"2b8366f4-8622-49dd-a0ab-d832fd34bccd\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-jpzts"
Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.510371 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k5fhj\" (UniqueName: \"kubernetes.io/projected/4ae7e30d-338b-464e-baef-c7e304ff67d9-kube-api-access-k5fhj\") pod \"migrator-59844c95c7-8fpc5\" (UID: \"4ae7e30d-338b-464e-baef-c7e304ff67d9\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-8fpc5"
Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.520862 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-qkh5b"
Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.526839 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-55m5g"
Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.539089 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l8s8t\" (UniqueName: \"kubernetes.io/projected/76a2b163-6490-4d77-947f-c7333cb25129-kube-api-access-l8s8t\") pod \"machine-config-controller-84d6567774-6b7sj\" (UID: \"76a2b163-6490-4d77-947f-c7333cb25129\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-6b7sj"
Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.553020 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ks8fc\" (UniqueName: \"kubernetes.io/projected/ad2115a5-1371-4a19-b1e8-7f93a7719a71-kube-api-access-ks8fc\") pod \"console-f9d7485db-8pb7c\" (UID: \"ad2115a5-1371-4a19-b1e8-7f93a7719a71\") " pod="openshift-console/console-f9d7485db-8pb7c"
Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.558878 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-jpzts"
Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.575237 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-bx4bn"
Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.577652 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/3f1deb2d-0d02-4085-8aba-d05024240e34-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-cgqtr\" (UID: \"3f1deb2d-0d02-4085-8aba-d05024240e34\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-cgqtr"
Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.582950 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29422845-kdjmt"
Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.594792 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9fks2\" (UniqueName: \"kubernetes.io/projected/00792261-f23c-4fc8-a67b-4b7753b692a1-kube-api-access-9fks2\") pod \"oauth-openshift-558db77b4-g7bns\" (UID: \"00792261-f23c-4fc8-a67b-4b7753b692a1\") " pod="openshift-authentication/oauth-openshift-558db77b4-g7bns"
Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.597379 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-w8czv"
Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.600817 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-6b7sj"
Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.613695 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kkmwp\" (UniqueName: \"kubernetes.io/projected/f0bed6a9-002b-40fc-89d5-031568b7e47c-kube-api-access-kkmwp\") pod \"authentication-operator-69f744f599-5f9l6\" (UID: \"f0bed6a9-002b-40fc-89d5-031568b7e47c\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-5f9l6"
Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.641179 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-62qtp\" (UniqueName: \"kubernetes.io/projected/2e786864-e8b1-4a03-9327-14d389a5bc21-kube-api-access-62qtp\") pod \"controller-manager-879f6c89f-mtf22\" (UID: \"2e786864-e8b1-4a03-9327-14d389a5bc21\") " pod="openshift-controller-manager/controller-manager-879f6c89f-mtf22"
Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.653454 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-mtf22"
Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.663571 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7j4s5\" (UniqueName: \"kubernetes.io/projected/61feb33b-8fe3-4ab5-b3a2-df7db4172225-kube-api-access-7j4s5\") pod \"console-operator-58897d9998-dbzbp\" (UID: \"61feb33b-8fe3-4ab5-b3a2-df7db4172225\") " pod="openshift-console-operator/console-operator-58897d9998-dbzbp"
Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.680927 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-glfrn\" (UniqueName: \"kubernetes.io/projected/23dc36ea-02d4-493e-863b-56126c624ab0-kube-api-access-glfrn\") pod \"downloads-7954f5f757-7smdt\" (UID: \"23dc36ea-02d4-493e-863b-56126c624ab0\") " pod="openshift-console/downloads-7954f5f757-7smdt"
Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.713689 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6x4f9\" (UniqueName: \"kubernetes.io/projected/3c94527b-255d-4486-9a36-9fd0f7efd4d7-kube-api-access-6x4f9\") pod \"kube-storage-version-migrator-operator-b67b599dd-rslp7\" (UID: \"3c94527b-255d-4486-9a36-9fd0f7efd4d7\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-rslp7"
Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.722005 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mm7cr\" (UniqueName: \"kubernetes.io/projected/a509d814-6573-4bfd-92c7-8d15a2d47228-kube-api-access-mm7cr\") pod \"cluster-samples-operator-665b6dd947-rgbkv\" (UID: \"a509d814-6573-4bfd-92c7-8d15a2d47228\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-rgbkv"
Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.732328 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-qq2ff"]
Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.735588 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b3ad786b-c5ca-47ba-8188-680f6c13192b-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-jmkzr\" (UID: \"b3ad786b-c5ca-47ba-8188-680f6c13192b\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jmkzr"
Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.743312 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-8pb7c"
Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.753842 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-8fpc5"
Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.756442 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-gwnkp"
Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.768365 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6zx2b\" (UniqueName: \"kubernetes.io/projected/1172878a-36b0-44ca-9df0-7eff88ebadfd-kube-api-access-6zx2b\") pod \"service-ca-operator-777779d784-nddsp\" (UID: \"1172878a-36b0-44ca-9df0-7eff88ebadfd\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-nddsp"
Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.777710 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-dbzbp"
Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.781358 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xc6b8\" (UniqueName: \"kubernetes.io/projected/abc0551e-9e8b-4466-9eea-1da60ead65bb-kube-api-access-xc6b8\") pod \"package-server-manager-789f6589d5-kwm5b\" (UID: \"abc0551e-9e8b-4466-9eea-1da60ead65bb\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-kwm5b"
Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.787798 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-cgqtr"
Dec 10 12:58:16 crc kubenswrapper[4921]: W1210 12:58:16.797311 4921 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod617cf016_593a_4d56_b104_e450cd6368ee.slice/crio-3bec37279217c481a186a92e6d9bcbf0856967fe0637cd92010dc93d9c042cea WatchSource:0}: Error finding container 3bec37279217c481a186a92e6d9bcbf0856967fe0637cd92010dc93d9c042cea: Status 404 returned error can't find the container with id 3bec37279217c481a186a92e6d9bcbf0856967fe0637cd92010dc93d9c042cea
Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.806217 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d8m4j" event={"ID":"c265a212-7c72-44e8-9e5e-cfef9fabdbfe","Type":"ContainerStarted","Data":"33e561c6470d9af2b46604a38e3a6dc6a9a7d42535d6d7b800ce979df7d329db"}
Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.806610 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-g7bns"
Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.834158 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-rslp7"
Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.841871 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-kwm5b"
Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.848875 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cdzhn\" (UniqueName: \"kubernetes.io/projected/eb53e918-67d3-472c-a390-d9a4e8f44fee-kube-api-access-cdzhn\") pod \"catalog-operator-68c6474976-ccdh5\" (UID: \"eb53e918-67d3-472c-a390-d9a4e8f44fee\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-ccdh5"
Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.848914 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/ae0b7e23-ca18-4adc-aa3a-551c273d45af-bound-sa-token\") pod \"image-registry-697d97f7c8-556st\" (UID: \"ae0b7e23-ca18-4adc-aa3a-551c273d45af\") " pod="openshift-image-registry/image-registry-697d97f7c8-556st"
Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.848932 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/ae0b7e23-ca18-4adc-aa3a-551c273d45af-ca-trust-extracted\") pod \"image-registry-697d97f7c8-556st\" (UID: \"ae0b7e23-ca18-4adc-aa3a-551c273d45af\") " pod="openshift-image-registry/image-registry-697d97f7c8-556st"
Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.848952 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/451a15e9-0c7d-4999-b37d-d255aa272b3a-auth-proxy-config\") pod \"machine-approver-56656f9798-nvgsl\" (UID: \"451a15e9-0c7d-4999-b37d-d255aa272b3a\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-nvgsl"
Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.851367 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qs9vr\" (UniqueName: \"kubernetes.io/projected/ea8b1b90-5a2a-4f0c-8fcb-c961b1ce0aa4-kube-api-access-qs9vr\") pod \"multus-admission-controller-857f4d67dd-kxlr2\" (UID: \"ea8b1b90-5a2a-4f0c-8fcb-c961b1ce0aa4\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-kxlr2"
Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.851501 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/3b87905a-cfdd-4617-90e8-38eb01147070-profile-collector-cert\") pod \"olm-operator-6b444d44fb-mnz9t\" (UID: \"3b87905a-cfdd-4617-90e8-38eb01147070\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-mnz9t"
Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.851523 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/451a15e9-0c7d-4999-b37d-d255aa272b3a-machine-approver-tls\") pod \"machine-approver-56656f9798-nvgsl\" (UID: \"451a15e9-0c7d-4999-b37d-d255aa272b3a\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-nvgsl"
Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.851546 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/eb53e918-67d3-472c-a390-d9a4e8f44fee-srv-cert\") pod \"catalog-operator-68c6474976-ccdh5\" (UID:
\"eb53e918-67d3-472c-a390-d9a4e8f44fee\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-ccdh5" Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.851571 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-556st\" (UID: \"ae0b7e23-ca18-4adc-aa3a-551c273d45af\") " pod="openshift-image-registry/image-registry-697d97f7c8-556st" Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.851603 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/ae0b7e23-ca18-4adc-aa3a-551c273d45af-registry-tls\") pod \"image-registry-697d97f7c8-556st\" (UID: \"ae0b7e23-ca18-4adc-aa3a-551c273d45af\") " pod="openshift-image-registry/image-registry-697d97f7c8-556st" Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.851657 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-prwth\" (UniqueName: \"kubernetes.io/projected/451a15e9-0c7d-4999-b37d-d255aa272b3a-kube-api-access-prwth\") pod \"machine-approver-56656f9798-nvgsl\" (UID: \"451a15e9-0c7d-4999-b37d-d255aa272b3a\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-nvgsl" Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.851672 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gt9hb\" (UniqueName: \"kubernetes.io/projected/6e462a60-2c72-4fe1-a9a1-e9f2a5cf185d-kube-api-access-gt9hb\") pod \"packageserver-d55dfcdfc-dkwtw\" (UID: \"6e462a60-2c72-4fe1-a9a1-e9f2a5cf185d\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-dkwtw" Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.851742 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/6e462a60-2c72-4fe1-a9a1-e9f2a5cf185d-apiservice-cert\") pod \"packageserver-d55dfcdfc-dkwtw\" (UID: \"6e462a60-2c72-4fe1-a9a1-e9f2a5cf185d\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-dkwtw" Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.851774 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/451a15e9-0c7d-4999-b37d-d255aa272b3a-config\") pod \"machine-approver-56656f9798-nvgsl\" (UID: \"451a15e9-0c7d-4999-b37d-d255aa272b3a\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-nvgsl" Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.851790 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/ea8b1b90-5a2a-4f0c-8fcb-c961b1ce0aa4-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-kxlr2\" (UID: \"ea8b1b90-5a2a-4f0c-8fcb-c961b1ce0aa4\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-kxlr2" Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.851805 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ae0b7e23-ca18-4adc-aa3a-551c273d45af-trusted-ca\") pod \"image-registry-697d97f7c8-556st\" (UID: 
\"ae0b7e23-ca18-4adc-aa3a-551c273d45af\") " pod="openshift-image-registry/image-registry-697d97f7c8-556st" Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.851854 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/3b87905a-cfdd-4617-90e8-38eb01147070-srv-cert\") pod \"olm-operator-6b444d44fb-mnz9t\" (UID: \"3b87905a-cfdd-4617-90e8-38eb01147070\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-mnz9t" Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.851868 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/eb53e918-67d3-472c-a390-d9a4e8f44fee-profile-collector-cert\") pod \"catalog-operator-68c6474976-ccdh5\" (UID: \"eb53e918-67d3-472c-a390-d9a4e8f44fee\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-ccdh5" Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.851896 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/6e462a60-2c72-4fe1-a9a1-e9f2a5cf185d-tmpfs\") pod \"packageserver-d55dfcdfc-dkwtw\" (UID: \"6e462a60-2c72-4fe1-a9a1-e9f2a5cf185d\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-dkwtw" Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.851932 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/ae0b7e23-ca18-4adc-aa3a-551c273d45af-installation-pull-secrets\") pod \"image-registry-697d97f7c8-556st\" (UID: \"ae0b7e23-ca18-4adc-aa3a-551c273d45af\") " pod="openshift-image-registry/image-registry-697d97f7c8-556st" Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.851969 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hwcr7\" (UniqueName: \"kubernetes.io/projected/3b87905a-cfdd-4617-90e8-38eb01147070-kube-api-access-hwcr7\") pod \"olm-operator-6b444d44fb-mnz9t\" (UID: \"3b87905a-cfdd-4617-90e8-38eb01147070\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-mnz9t" Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.852025 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/ae0b7e23-ca18-4adc-aa3a-551c273d45af-registry-certificates\") pod \"image-registry-697d97f7c8-556st\" (UID: \"ae0b7e23-ca18-4adc-aa3a-551c273d45af\") " pod="openshift-image-registry/image-registry-697d97f7c8-556st" Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.852049 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tg59x\" (UniqueName: \"kubernetes.io/projected/ae0b7e23-ca18-4adc-aa3a-551c273d45af-kube-api-access-tg59x\") pod \"image-registry-697d97f7c8-556st\" (UID: \"ae0b7e23-ca18-4adc-aa3a-551c273d45af\") " pod="openshift-image-registry/image-registry-697d97f7c8-556st" Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.852065 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/6e462a60-2c72-4fe1-a9a1-e9f2a5cf185d-webhook-cert\") pod \"packageserver-d55dfcdfc-dkwtw\" (UID: \"6e462a60-2c72-4fe1-a9a1-e9f2a5cf185d\") " 
pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-dkwtw" Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.849958 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-5f9l6" Dec 10 12:58:16 crc kubenswrapper[4921]: E1210 12:58:16.852804 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 12:58:17.352793867 +0000 UTC m=+94.569015791 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-556st" (UID: "ae0b7e23-ca18-4adc-aa3a-551c273d45af") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.867607 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-nddsp" Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.911604 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-55m5g"] Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.911704 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-rgbkv" Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.923753 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jmkzr" Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.925091 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/downloads-7954f5f757-7smdt" Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.956542 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.956912 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/cb397e19-ed97-4db5-a9a0-a8da0d3a55a5-etcd-ca\") pod \"etcd-operator-b45778765-7hhsc\" (UID: \"cb397e19-ed97-4db5-a9a0-a8da0d3a55a5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-7hhsc" Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.956969 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cdzhn\" (UniqueName: \"kubernetes.io/projected/eb53e918-67d3-472c-a390-d9a4e8f44fee-kube-api-access-cdzhn\") pod \"catalog-operator-68c6474976-ccdh5\" (UID: \"eb53e918-67d3-472c-a390-d9a4e8f44fee\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-ccdh5" Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.956995 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/ae0b7e23-ca18-4adc-aa3a-551c273d45af-bound-sa-token\") pod \"image-registry-697d97f7c8-556st\" (UID: \"ae0b7e23-ca18-4adc-aa3a-551c273d45af\") " pod="openshift-image-registry/image-registry-697d97f7c8-556st" Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.957036 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/11c4ff79-c760-4e5d-8594-8dd82990dec0-stats-auth\") pod \"router-default-5444994796-xtvnd\" (UID: \"11c4ff79-c760-4e5d-8594-8dd82990dec0\") " pod="openshift-ingress/router-default-5444994796-xtvnd" Dec 10 12:58:16 crc kubenswrapper[4921]: E1210 12:58:16.957137 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 12:58:17.457085616 +0000 UTC m=+94.673307540 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.957293 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gw28f\" (UniqueName: \"kubernetes.io/projected/11c4ff79-c760-4e5d-8594-8dd82990dec0-kube-api-access-gw28f\") pod \"router-default-5444994796-xtvnd\" (UID: \"11c4ff79-c760-4e5d-8594-8dd82990dec0\") " pod="openshift-ingress/router-default-5444994796-xtvnd" Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.957354 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fwvjq\" (UniqueName: \"kubernetes.io/projected/9831d515-5a42-4f3e-98a9-aca48b1093d8-kube-api-access-fwvjq\") pod \"csi-hostpathplugin-m9sh2\" (UID: \"9831d515-5a42-4f3e-98a9-aca48b1093d8\") " pod="hostpath-provisioner/csi-hostpathplugin-m9sh2" Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.957380 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/83010039-4d8d-46c5-9069-585653c86378-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-kz2qx\" (UID: \"83010039-4d8d-46c5-9069-585653c86378\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-kz2qx" Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.957583 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/691beb9c-4f21-4efc-b00d-731786bc427d-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-zkglp\" (UID: \"691beb9c-4f21-4efc-b00d-731786bc427d\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-zkglp" Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.957726 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/ae0b7e23-ca18-4adc-aa3a-551c273d45af-ca-trust-extracted\") pod \"image-registry-697d97f7c8-556st\" (UID: \"ae0b7e23-ca18-4adc-aa3a-551c273d45af\") " pod="openshift-image-registry/image-registry-697d97f7c8-556st" Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.957749 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/9a307988-ced0-4f7b-b6d2-e981c88eabd7-metrics-tls\") pod \"dns-operator-744455d44c-sjkkc\" (UID: \"9a307988-ced0-4f7b-b6d2-e981c88eabd7\") " pod="openshift-dns-operator/dns-operator-744455d44c-sjkkc" Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.957772 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s8r4r\" (UniqueName: \"kubernetes.io/projected/bd4b30ef-c583-4142-874c-806c9038a954-kube-api-access-s8r4r\") pod \"dns-default-sh5l7\" (UID: \"bd4b30ef-c583-4142-874c-806c9038a954\") " pod="openshift-dns/dns-default-sh5l7" Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.957906 4921 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/451a15e9-0c7d-4999-b37d-d255aa272b3a-auth-proxy-config\") pod \"machine-approver-56656f9798-nvgsl\" (UID: \"451a15e9-0c7d-4999-b37d-d255aa272b3a\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-nvgsl" Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.958048 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qs9vr\" (UniqueName: \"kubernetes.io/projected/ea8b1b90-5a2a-4f0c-8fcb-c961b1ce0aa4-kube-api-access-qs9vr\") pod \"multus-admission-controller-857f4d67dd-kxlr2\" (UID: \"ea8b1b90-5a2a-4f0c-8fcb-c961b1ce0aa4\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-kxlr2" Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.958085 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/49b1e180-71ea-45cf-bc4f-4b2283aa366e-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-bbgxs\" (UID: \"49b1e180-71ea-45cf-bc4f-4b2283aa366e\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-bbgxs" Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.958217 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/3b87905a-cfdd-4617-90e8-38eb01147070-profile-collector-cert\") pod \"olm-operator-6b444d44fb-mnz9t\" (UID: \"3b87905a-cfdd-4617-90e8-38eb01147070\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-mnz9t" Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.958284 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/451a15e9-0c7d-4999-b37d-d255aa272b3a-machine-approver-tls\") pod \"machine-approver-56656f9798-nvgsl\" (UID: \"451a15e9-0c7d-4999-b37d-d255aa272b3a\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-nvgsl" Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.958312 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/9831d515-5a42-4f3e-98a9-aca48b1093d8-socket-dir\") pod \"csi-hostpathplugin-m9sh2\" (UID: \"9831d515-5a42-4f3e-98a9-aca48b1093d8\") " pod="hostpath-provisioner/csi-hostpathplugin-m9sh2" Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.958543 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-556st\" (UID: \"ae0b7e23-ca18-4adc-aa3a-551c273d45af\") " pod="openshift-image-registry/image-registry-697d97f7c8-556st" Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.958590 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/eb53e918-67d3-472c-a390-d9a4e8f44fee-srv-cert\") pod \"catalog-operator-68c6474976-ccdh5\" (UID: \"eb53e918-67d3-472c-a390-d9a4e8f44fee\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-ccdh5" Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.958619 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/9831d515-5a42-4f3e-98a9-aca48b1093d8-csi-data-dir\") pod \"csi-hostpathplugin-m9sh2\" (UID: \"9831d515-5a42-4f3e-98a9-aca48b1093d8\") " pod="hostpath-provisioner/csi-hostpathplugin-m9sh2" Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.960318 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/83010039-4d8d-46c5-9069-585653c86378-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-kz2qx\" (UID: \"83010039-4d8d-46c5-9069-585653c86378\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-kz2qx" Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.960348 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/ae0b7e23-ca18-4adc-aa3a-551c273d45af-registry-tls\") pod \"image-registry-697d97f7c8-556st\" (UID: \"ae0b7e23-ca18-4adc-aa3a-551c273d45af\") " pod="openshift-image-registry/image-registry-697d97f7c8-556st" Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.960371 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/49b1e180-71ea-45cf-bc4f-4b2283aa366e-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-bbgxs\" (UID: \"49b1e180-71ea-45cf-bc4f-4b2283aa366e\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-bbgxs" Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.960430 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/cb397e19-ed97-4db5-a9a0-a8da0d3a55a5-etcd-client\") pod \"etcd-operator-b45778765-7hhsc\" (UID: \"cb397e19-ed97-4db5-a9a0-a8da0d3a55a5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-7hhsc" Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.960466 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/691beb9c-4f21-4efc-b00d-731786bc427d-config\") pod \"kube-controller-manager-operator-78b949d7b-zkglp\" (UID: \"691beb9c-4f21-4efc-b00d-731786bc427d\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-zkglp" Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.960491 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5m2bn\" (UniqueName: \"kubernetes.io/projected/a24ccf53-2780-49dd-9f26-d80a42631230-kube-api-access-5m2bn\") pod \"openshift-config-operator-7777fb866f-dhxg9\" (UID: \"a24ccf53-2780-49dd-9f26-d80a42631230\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-dhxg9" Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.960539 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-prwth\" (UniqueName: \"kubernetes.io/projected/451a15e9-0c7d-4999-b37d-d255aa272b3a-kube-api-access-prwth\") pod \"machine-approver-56656f9798-nvgsl\" (UID: \"451a15e9-0c7d-4999-b37d-d255aa272b3a\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-nvgsl" Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.960563 4921 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/a24ccf53-2780-49dd-9f26-d80a42631230-available-featuregates\") pod \"openshift-config-operator-7777fb866f-dhxg9\" (UID: \"a24ccf53-2780-49dd-9f26-d80a42631230\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-dhxg9" Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.960585 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/07f665ea-3510-48cc-bacf-b92172029dce-service-ca\") pod \"cluster-version-operator-5c965bbfc6-cwdx8\" (UID: \"07f665ea-3510-48cc-bacf-b92172029dce\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-cwdx8" Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.960602 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/11c4ff79-c760-4e5d-8594-8dd82990dec0-default-certificate\") pod \"router-default-5444994796-xtvnd\" (UID: \"11c4ff79-c760-4e5d-8594-8dd82990dec0\") " pod="openshift-ingress/router-default-5444994796-xtvnd" Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.960626 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gt9hb\" (UniqueName: \"kubernetes.io/projected/6e462a60-2c72-4fe1-a9a1-e9f2a5cf185d-kube-api-access-gt9hb\") pod \"packageserver-d55dfcdfc-dkwtw\" (UID: \"6e462a60-2c72-4fe1-a9a1-e9f2a5cf185d\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-dkwtw" Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.960661 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/9831d515-5a42-4f3e-98a9-aca48b1093d8-mountpoint-dir\") pod \"csi-hostpathplugin-m9sh2\" (UID: \"9831d515-5a42-4f3e-98a9-aca48b1093d8\") " pod="hostpath-provisioner/csi-hostpathplugin-m9sh2" Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.960706 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/451a15e9-0c7d-4999-b37d-d255aa272b3a-config\") pod \"machine-approver-56656f9798-nvgsl\" (UID: \"451a15e9-0c7d-4999-b37d-d255aa272b3a\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-nvgsl" Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.960724 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/ea8b1b90-5a2a-4f0c-8fcb-c961b1ce0aa4-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-kxlr2\" (UID: \"ea8b1b90-5a2a-4f0c-8fcb-c961b1ce0aa4\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-kxlr2" Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.960745 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/6e462a60-2c72-4fe1-a9a1-e9f2a5cf185d-apiservice-cert\") pod \"packageserver-d55dfcdfc-dkwtw\" (UID: \"6e462a60-2c72-4fe1-a9a1-e9f2a5cf185d\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-dkwtw" Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.960779 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: 
\"kubernetes.io/configmap/ae0b7e23-ca18-4adc-aa3a-551c273d45af-trusted-ca\") pod \"image-registry-697d97f7c8-556st\" (UID: \"ae0b7e23-ca18-4adc-aa3a-551c273d45af\") " pod="openshift-image-registry/image-registry-697d97f7c8-556st" Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.960799 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/9831d515-5a42-4f3e-98a9-aca48b1093d8-registration-dir\") pod \"csi-hostpathplugin-m9sh2\" (UID: \"9831d515-5a42-4f3e-98a9-aca48b1093d8\") " pod="hostpath-provisioner/csi-hostpathplugin-m9sh2" Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.960829 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/11c4ff79-c760-4e5d-8594-8dd82990dec0-service-ca-bundle\") pod \"router-default-5444994796-xtvnd\" (UID: \"11c4ff79-c760-4e5d-8594-8dd82990dec0\") " pod="openshift-ingress/router-default-5444994796-xtvnd" Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.960917 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ntdkt\" (UniqueName: \"kubernetes.io/projected/9a307988-ced0-4f7b-b6d2-e981c88eabd7-kube-api-access-ntdkt\") pod \"dns-operator-744455d44c-sjkkc\" (UID: \"9a307988-ced0-4f7b-b6d2-e981c88eabd7\") " pod="openshift-dns-operator/dns-operator-744455d44c-sjkkc" Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.960970 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v7nzx\" (UniqueName: \"kubernetes.io/projected/34937a53-6292-4363-ad11-9d5e180b13b5-kube-api-access-v7nzx\") pod \"ingress-operator-5b745b69d9-5lrds\" (UID: \"34937a53-6292-4363-ad11-9d5e180b13b5\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-5lrds" Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.961006 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-46cbm\" (UniqueName: \"kubernetes.io/projected/49b1e180-71ea-45cf-bc4f-4b2283aa366e-kube-api-access-46cbm\") pod \"openshift-controller-manager-operator-756b6f6bc6-bbgxs\" (UID: \"49b1e180-71ea-45cf-bc4f-4b2283aa366e\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-bbgxs" Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.961026 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/11c4ff79-c760-4e5d-8594-8dd82990dec0-metrics-certs\") pod \"router-default-5444994796-xtvnd\" (UID: \"11c4ff79-c760-4e5d-8594-8dd82990dec0\") " pod="openshift-ingress/router-default-5444994796-xtvnd" Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.961047 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/34937a53-6292-4363-ad11-9d5e180b13b5-metrics-tls\") pod \"ingress-operator-5b745b69d9-5lrds\" (UID: \"34937a53-6292-4363-ad11-9d5e180b13b5\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-5lrds" Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.961111 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/3b87905a-cfdd-4617-90e8-38eb01147070-srv-cert\") pod 
\"olm-operator-6b444d44fb-mnz9t\" (UID: \"3b87905a-cfdd-4617-90e8-38eb01147070\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-mnz9t" Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.961148 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/eb53e918-67d3-472c-a390-d9a4e8f44fee-profile-collector-cert\") pod \"catalog-operator-68c6474976-ccdh5\" (UID: \"eb53e918-67d3-472c-a390-d9a4e8f44fee\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-ccdh5" Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.961170 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/07f665ea-3510-48cc-bacf-b92172029dce-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-cwdx8\" (UID: \"07f665ea-3510-48cc-bacf-b92172029dce\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-cwdx8" Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.961201 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/2d27a376-92a0-462a-b9c1-70aea2ea58f2-node-bootstrap-token\") pod \"machine-config-server-vp9nq\" (UID: \"2d27a376-92a0-462a-b9c1-70aea2ea58f2\") " pod="openshift-machine-config-operator/machine-config-server-vp9nq" Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.961256 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/6e462a60-2c72-4fe1-a9a1-e9f2a5cf185d-tmpfs\") pod \"packageserver-d55dfcdfc-dkwtw\" (UID: \"6e462a60-2c72-4fe1-a9a1-e9f2a5cf185d\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-dkwtw" Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.961276 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/bd4b30ef-c583-4142-874c-806c9038a954-config-volume\") pod \"dns-default-sh5l7\" (UID: \"bd4b30ef-c583-4142-874c-806c9038a954\") " pod="openshift-dns/dns-default-sh5l7" Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.961330 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/34937a53-6292-4363-ad11-9d5e180b13b5-trusted-ca\") pod \"ingress-operator-5b745b69d9-5lrds\" (UID: \"34937a53-6292-4363-ad11-9d5e180b13b5\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-5lrds" Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.961351 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/34937a53-6292-4363-ad11-9d5e180b13b5-bound-sa-token\") pod \"ingress-operator-5b745b69d9-5lrds\" (UID: \"34937a53-6292-4363-ad11-9d5e180b13b5\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-5lrds" Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.961370 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cb397e19-ed97-4db5-a9a0-a8da0d3a55a5-serving-cert\") pod \"etcd-operator-b45778765-7hhsc\" (UID: \"cb397e19-ed97-4db5-a9a0-a8da0d3a55a5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-7hhsc" 
Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.961403 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/07f665ea-3510-48cc-bacf-b92172029dce-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-cwdx8\" (UID: \"07f665ea-3510-48cc-bacf-b92172029dce\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-cwdx8" Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.961434 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/bd4b30ef-c583-4142-874c-806c9038a954-metrics-tls\") pod \"dns-default-sh5l7\" (UID: \"bd4b30ef-c583-4142-874c-806c9038a954\") " pod="openshift-dns/dns-default-sh5l7" Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.961504 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/ae0b7e23-ca18-4adc-aa3a-551c273d45af-installation-pull-secrets\") pod \"image-registry-697d97f7c8-556st\" (UID: \"ae0b7e23-ca18-4adc-aa3a-551c273d45af\") " pod="openshift-image-registry/image-registry-697d97f7c8-556st" Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.961525 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/691beb9c-4f21-4efc-b00d-731786bc427d-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-zkglp\" (UID: \"691beb9c-4f21-4efc-b00d-731786bc427d\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-zkglp" Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.961543 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/cb397e19-ed97-4db5-a9a0-a8da0d3a55a5-etcd-service-ca\") pod \"etcd-operator-b45778765-7hhsc\" (UID: \"cb397e19-ed97-4db5-a9a0-a8da0d3a55a5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-7hhsc" Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.961606 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a24ccf53-2780-49dd-9f26-d80a42631230-serving-cert\") pod \"openshift-config-operator-7777fb866f-dhxg9\" (UID: \"a24ccf53-2780-49dd-9f26-d80a42631230\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-dhxg9" Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.961627 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pl6k2\" (UniqueName: \"kubernetes.io/projected/2d27a376-92a0-462a-b9c1-70aea2ea58f2-kube-api-access-pl6k2\") pod \"machine-config-server-vp9nq\" (UID: \"2d27a376-92a0-462a-b9c1-70aea2ea58f2\") " pod="openshift-machine-config-operator/machine-config-server-vp9nq" Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.961675 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hwcr7\" (UniqueName: \"kubernetes.io/projected/3b87905a-cfdd-4617-90e8-38eb01147070-kube-api-access-hwcr7\") pod \"olm-operator-6b444d44fb-mnz9t\" (UID: \"3b87905a-cfdd-4617-90e8-38eb01147070\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-mnz9t" Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.961695 4921 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/07f665ea-3510-48cc-bacf-b92172029dce-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-cwdx8\" (UID: \"07f665ea-3510-48cc-bacf-b92172029dce\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-cwdx8" Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.961713 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4hgzt\" (UniqueName: \"kubernetes.io/projected/cb397e19-ed97-4db5-a9a0-a8da0d3a55a5-kube-api-access-4hgzt\") pod \"etcd-operator-b45778765-7hhsc\" (UID: \"cb397e19-ed97-4db5-a9a0-a8da0d3a55a5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-7hhsc" Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.961749 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x9bc2\" (UniqueName: \"kubernetes.io/projected/00babddd-5238-454a-92dc-ba2954d36d78-kube-api-access-x9bc2\") pod \"ingress-canary-78cdm\" (UID: \"00babddd-5238-454a-92dc-ba2954d36d78\") " pod="openshift-ingress-canary/ingress-canary-78cdm" Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.961812 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/00babddd-5238-454a-92dc-ba2954d36d78-cert\") pod \"ingress-canary-78cdm\" (UID: \"00babddd-5238-454a-92dc-ba2954d36d78\") " pod="openshift-ingress-canary/ingress-canary-78cdm" Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.961833 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/83010039-4d8d-46c5-9069-585653c86378-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-kz2qx\" (UID: \"83010039-4d8d-46c5-9069-585653c86378\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-kz2qx" Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.961852 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/9831d515-5a42-4f3e-98a9-aca48b1093d8-plugins-dir\") pod \"csi-hostpathplugin-m9sh2\" (UID: \"9831d515-5a42-4f3e-98a9-aca48b1093d8\") " pod="hostpath-provisioner/csi-hostpathplugin-m9sh2" Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.961874 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cb397e19-ed97-4db5-a9a0-a8da0d3a55a5-config\") pod \"etcd-operator-b45778765-7hhsc\" (UID: \"cb397e19-ed97-4db5-a9a0-a8da0d3a55a5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-7hhsc" Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.961944 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/ae0b7e23-ca18-4adc-aa3a-551c273d45af-registry-certificates\") pod \"image-registry-697d97f7c8-556st\" (UID: \"ae0b7e23-ca18-4adc-aa3a-551c273d45af\") " pod="openshift-image-registry/image-registry-697d97f7c8-556st" Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.961965 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tg59x\" (UniqueName: 
\"kubernetes.io/projected/ae0b7e23-ca18-4adc-aa3a-551c273d45af-kube-api-access-tg59x\") pod \"image-registry-697d97f7c8-556st\" (UID: \"ae0b7e23-ca18-4adc-aa3a-551c273d45af\") " pod="openshift-image-registry/image-registry-697d97f7c8-556st" Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.961986 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/6e462a60-2c72-4fe1-a9a1-e9f2a5cf185d-webhook-cert\") pod \"packageserver-d55dfcdfc-dkwtw\" (UID: \"6e462a60-2c72-4fe1-a9a1-e9f2a5cf185d\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-dkwtw" Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.962006 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/2d27a376-92a0-462a-b9c1-70aea2ea58f2-certs\") pod \"machine-config-server-vp9nq\" (UID: \"2d27a376-92a0-462a-b9c1-70aea2ea58f2\") " pod="openshift-machine-config-operator/machine-config-server-vp9nq" Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.962027 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/07f665ea-3510-48cc-bacf-b92172029dce-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-cwdx8\" (UID: \"07f665ea-3510-48cc-bacf-b92172029dce\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-cwdx8" Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.962095 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4d4r2\" (UniqueName: \"kubernetes.io/projected/83010039-4d8d-46c5-9069-585653c86378-kube-api-access-4d4r2\") pod \"cluster-image-registry-operator-dc59b4c8b-kz2qx\" (UID: \"83010039-4d8d-46c5-9069-585653c86378\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-kz2qx" Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.968784 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/451a15e9-0c7d-4999-b37d-d255aa272b3a-auth-proxy-config\") pod \"machine-approver-56656f9798-nvgsl\" (UID: \"451a15e9-0c7d-4999-b37d-d255aa272b3a\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-nvgsl" Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.974689 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/451a15e9-0c7d-4999-b37d-d255aa272b3a-machine-approver-tls\") pod \"machine-approver-56656f9798-nvgsl\" (UID: \"451a15e9-0c7d-4999-b37d-d255aa272b3a\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-nvgsl" Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.981916 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/451a15e9-0c7d-4999-b37d-d255aa272b3a-config\") pod \"machine-approver-56656f9798-nvgsl\" (UID: \"451a15e9-0c7d-4999-b37d-d255aa272b3a\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-nvgsl" Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.983021 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/eb53e918-67d3-472c-a390-d9a4e8f44fee-srv-cert\") pod \"catalog-operator-68c6474976-ccdh5\" (UID: \"eb53e918-67d3-472c-a390-d9a4e8f44fee\") 
" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-ccdh5" Dec 10 12:58:16 crc kubenswrapper[4921]: E1210 12:58:16.983127 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 12:58:17.476951748 +0000 UTC m=+94.693173672 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-556st" (UID: "ae0b7e23-ca18-4adc-aa3a-551c273d45af") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:58:16 crc kubenswrapper[4921]: I1210 12:58:16.984193 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/ae0b7e23-ca18-4adc-aa3a-551c273d45af-ca-trust-extracted\") pod \"image-registry-697d97f7c8-556st\" (UID: \"ae0b7e23-ca18-4adc-aa3a-551c273d45af\") " pod="openshift-image-registry/image-registry-697d97f7c8-556st" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.000118 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/ea8b1b90-5a2a-4f0c-8fcb-c961b1ce0aa4-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-kxlr2\" (UID: \"ea8b1b90-5a2a-4f0c-8fcb-c961b1ce0aa4\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-kxlr2" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.000981 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/ae0b7e23-ca18-4adc-aa3a-551c273d45af-registry-tls\") pod \"image-registry-697d97f7c8-556st\" (UID: \"ae0b7e23-ca18-4adc-aa3a-551c273d45af\") " pod="openshift-image-registry/image-registry-697d97f7c8-556st" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.002125 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/3b87905a-cfdd-4617-90e8-38eb01147070-profile-collector-cert\") pod \"olm-operator-6b444d44fb-mnz9t\" (UID: \"3b87905a-cfdd-4617-90e8-38eb01147070\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-mnz9t" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.002577 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/3b87905a-cfdd-4617-90e8-38eb01147070-srv-cert\") pod \"olm-operator-6b444d44fb-mnz9t\" (UID: \"3b87905a-cfdd-4617-90e8-38eb01147070\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-mnz9t" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.003269 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-qkh5b"] Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.008230 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/6e462a60-2c72-4fe1-a9a1-e9f2a5cf185d-tmpfs\") pod \"packageserver-d55dfcdfc-dkwtw\" (UID: \"6e462a60-2c72-4fe1-a9a1-e9f2a5cf185d\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-dkwtw" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 
12:58:17.009642 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/ae0b7e23-ca18-4adc-aa3a-551c273d45af-registry-certificates\") pod \"image-registry-697d97f7c8-556st\" (UID: \"ae0b7e23-ca18-4adc-aa3a-551c273d45af\") " pod="openshift-image-registry/image-registry-697d97f7c8-556st" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.017325 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ae0b7e23-ca18-4adc-aa3a-551c273d45af-trusted-ca\") pod \"image-registry-697d97f7c8-556st\" (UID: \"ae0b7e23-ca18-4adc-aa3a-551c273d45af\") " pod="openshift-image-registry/image-registry-697d97f7c8-556st" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.018159 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/eb53e918-67d3-472c-a390-d9a4e8f44fee-profile-collector-cert\") pod \"catalog-operator-68c6474976-ccdh5\" (UID: \"eb53e918-67d3-472c-a390-d9a4e8f44fee\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-ccdh5" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.021692 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/6e462a60-2c72-4fe1-a9a1-e9f2a5cf185d-apiservice-cert\") pod \"packageserver-d55dfcdfc-dkwtw\" (UID: \"6e462a60-2c72-4fe1-a9a1-e9f2a5cf185d\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-dkwtw" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.026680 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-bx4bn"] Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.027209 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/ae0b7e23-ca18-4adc-aa3a-551c273d45af-installation-pull-secrets\") pod \"image-registry-697d97f7c8-556st\" (UID: \"ae0b7e23-ca18-4adc-aa3a-551c273d45af\") " pod="openshift-image-registry/image-registry-697d97f7c8-556st" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.031376 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cdzhn\" (UniqueName: \"kubernetes.io/projected/eb53e918-67d3-472c-a390-d9a4e8f44fee-kube-api-access-cdzhn\") pod \"catalog-operator-68c6474976-ccdh5\" (UID: \"eb53e918-67d3-472c-a390-d9a4e8f44fee\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-ccdh5" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.044903 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/ae0b7e23-ca18-4adc-aa3a-551c273d45af-bound-sa-token\") pod \"image-registry-697d97f7c8-556st\" (UID: \"ae0b7e23-ca18-4adc-aa3a-551c273d45af\") " pod="openshift-image-registry/image-registry-697d97f7c8-556st" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.051841 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/6e462a60-2c72-4fe1-a9a1-e9f2a5cf185d-webhook-cert\") pod \"packageserver-d55dfcdfc-dkwtw\" (UID: \"6e462a60-2c72-4fe1-a9a1-e9f2a5cf185d\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-dkwtw" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.062810 4921 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qs9vr\" (UniqueName: \"kubernetes.io/projected/ea8b1b90-5a2a-4f0c-8fcb-c961b1ce0aa4-kube-api-access-qs9vr\") pod \"multus-admission-controller-857f4d67dd-kxlr2\" (UID: \"ea8b1b90-5a2a-4f0c-8fcb-c961b1ce0aa4\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-kxlr2" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.062911 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.063277 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/11c4ff79-c760-4e5d-8594-8dd82990dec0-default-certificate\") pod \"router-default-5444994796-xtvnd\" (UID: \"11c4ff79-c760-4e5d-8594-8dd82990dec0\") " pod="openshift-ingress/router-default-5444994796-xtvnd" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.063317 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/07f665ea-3510-48cc-bacf-b92172029dce-service-ca\") pod \"cluster-version-operator-5c965bbfc6-cwdx8\" (UID: \"07f665ea-3510-48cc-bacf-b92172029dce\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-cwdx8" Dec 10 12:58:17 crc kubenswrapper[4921]: E1210 12:58:17.063345 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 12:58:17.563312418 +0000 UTC m=+94.779534342 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.063486 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/9831d515-5a42-4f3e-98a9-aca48b1093d8-mountpoint-dir\") pod \"csi-hostpathplugin-m9sh2\" (UID: \"9831d515-5a42-4f3e-98a9-aca48b1093d8\") " pod="hostpath-provisioner/csi-hostpathplugin-m9sh2" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.063555 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/9831d515-5a42-4f3e-98a9-aca48b1093d8-registration-dir\") pod \"csi-hostpathplugin-m9sh2\" (UID: \"9831d515-5a42-4f3e-98a9-aca48b1093d8\") " pod="hostpath-provisioner/csi-hostpathplugin-m9sh2" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.063584 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/11c4ff79-c760-4e5d-8594-8dd82990dec0-service-ca-bundle\") pod \"router-default-5444994796-xtvnd\" (UID: \"11c4ff79-c760-4e5d-8594-8dd82990dec0\") " pod="openshift-ingress/router-default-5444994796-xtvnd" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.063614 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ntdkt\" (UniqueName: \"kubernetes.io/projected/9a307988-ced0-4f7b-b6d2-e981c88eabd7-kube-api-access-ntdkt\") pod \"dns-operator-744455d44c-sjkkc\" (UID: \"9a307988-ced0-4f7b-b6d2-e981c88eabd7\") " pod="openshift-dns-operator/dns-operator-744455d44c-sjkkc" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.063647 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v7nzx\" (UniqueName: \"kubernetes.io/projected/34937a53-6292-4363-ad11-9d5e180b13b5-kube-api-access-v7nzx\") pod \"ingress-operator-5b745b69d9-5lrds\" (UID: \"34937a53-6292-4363-ad11-9d5e180b13b5\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-5lrds" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.070882 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/11c4ff79-c760-4e5d-8594-8dd82990dec0-metrics-certs\") pod \"router-default-5444994796-xtvnd\" (UID: \"11c4ff79-c760-4e5d-8594-8dd82990dec0\") " pod="openshift-ingress/router-default-5444994796-xtvnd" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.070930 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-46cbm\" (UniqueName: \"kubernetes.io/projected/49b1e180-71ea-45cf-bc4f-4b2283aa366e-kube-api-access-46cbm\") pod \"openshift-controller-manager-operator-756b6f6bc6-bbgxs\" (UID: \"49b1e180-71ea-45cf-bc4f-4b2283aa366e\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-bbgxs" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.070952 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: 
\"kubernetes.io/secret/34937a53-6292-4363-ad11-9d5e180b13b5-metrics-tls\") pod \"ingress-operator-5b745b69d9-5lrds\" (UID: \"34937a53-6292-4363-ad11-9d5e180b13b5\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-5lrds" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.071030 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/07f665ea-3510-48cc-bacf-b92172029dce-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-cwdx8\" (UID: \"07f665ea-3510-48cc-bacf-b92172029dce\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-cwdx8" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.071069 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/2d27a376-92a0-462a-b9c1-70aea2ea58f2-node-bootstrap-token\") pod \"machine-config-server-vp9nq\" (UID: \"2d27a376-92a0-462a-b9c1-70aea2ea58f2\") " pod="openshift-machine-config-operator/machine-config-server-vp9nq" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.071168 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/bd4b30ef-c583-4142-874c-806c9038a954-config-volume\") pod \"dns-default-sh5l7\" (UID: \"bd4b30ef-c583-4142-874c-806c9038a954\") " pod="openshift-dns/dns-default-sh5l7" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.071202 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/07f665ea-3510-48cc-bacf-b92172029dce-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-cwdx8\" (UID: \"07f665ea-3510-48cc-bacf-b92172029dce\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-cwdx8" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.065282 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/9831d515-5a42-4f3e-98a9-aca48b1093d8-mountpoint-dir\") pod \"csi-hostpathplugin-m9sh2\" (UID: \"9831d515-5a42-4f3e-98a9-aca48b1093d8\") " pod="hostpath-provisioner/csi-hostpathplugin-m9sh2" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.071223 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/34937a53-6292-4363-ad11-9d5e180b13b5-trusted-ca\") pod \"ingress-operator-5b745b69d9-5lrds\" (UID: \"34937a53-6292-4363-ad11-9d5e180b13b5\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-5lrds" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.071303 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/34937a53-6292-4363-ad11-9d5e180b13b5-bound-sa-token\") pod \"ingress-operator-5b745b69d9-5lrds\" (UID: \"34937a53-6292-4363-ad11-9d5e180b13b5\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-5lrds" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.071325 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cb397e19-ed97-4db5-a9a0-a8da0d3a55a5-serving-cert\") pod \"etcd-operator-b45778765-7hhsc\" (UID: \"cb397e19-ed97-4db5-a9a0-a8da0d3a55a5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-7hhsc" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.071347 4921 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/bd4b30ef-c583-4142-874c-806c9038a954-metrics-tls\") pod \"dns-default-sh5l7\" (UID: \"bd4b30ef-c583-4142-874c-806c9038a954\") " pod="openshift-dns/dns-default-sh5l7" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.071380 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/691beb9c-4f21-4efc-b00d-731786bc427d-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-zkglp\" (UID: \"691beb9c-4f21-4efc-b00d-731786bc427d\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-zkglp" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.071437 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a24ccf53-2780-49dd-9f26-d80a42631230-serving-cert\") pod \"openshift-config-operator-7777fb866f-dhxg9\" (UID: \"a24ccf53-2780-49dd-9f26-d80a42631230\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-dhxg9" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.071470 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/cb397e19-ed97-4db5-a9a0-a8da0d3a55a5-etcd-service-ca\") pod \"etcd-operator-b45778765-7hhsc\" (UID: \"cb397e19-ed97-4db5-a9a0-a8da0d3a55a5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-7hhsc" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.071503 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pl6k2\" (UniqueName: \"kubernetes.io/projected/2d27a376-92a0-462a-b9c1-70aea2ea58f2-kube-api-access-pl6k2\") pod \"machine-config-server-vp9nq\" (UID: \"2d27a376-92a0-462a-b9c1-70aea2ea58f2\") " pod="openshift-machine-config-operator/machine-config-server-vp9nq" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.071537 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/07f665ea-3510-48cc-bacf-b92172029dce-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-cwdx8\" (UID: \"07f665ea-3510-48cc-bacf-b92172029dce\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-cwdx8" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.071570 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4hgzt\" (UniqueName: \"kubernetes.io/projected/cb397e19-ed97-4db5-a9a0-a8da0d3a55a5-kube-api-access-4hgzt\") pod \"etcd-operator-b45778765-7hhsc\" (UID: \"cb397e19-ed97-4db5-a9a0-a8da0d3a55a5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-7hhsc" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.071599 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x9bc2\" (UniqueName: \"kubernetes.io/projected/00babddd-5238-454a-92dc-ba2954d36d78-kube-api-access-x9bc2\") pod \"ingress-canary-78cdm\" (UID: \"00babddd-5238-454a-92dc-ba2954d36d78\") " pod="openshift-ingress-canary/ingress-canary-78cdm" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.071642 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/00babddd-5238-454a-92dc-ba2954d36d78-cert\") pod \"ingress-canary-78cdm\" (UID: 
\"00babddd-5238-454a-92dc-ba2954d36d78\") " pod="openshift-ingress-canary/ingress-canary-78cdm" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.071662 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/83010039-4d8d-46c5-9069-585653c86378-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-kz2qx\" (UID: \"83010039-4d8d-46c5-9069-585653c86378\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-kz2qx" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.071683 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/9831d515-5a42-4f3e-98a9-aca48b1093d8-plugins-dir\") pod \"csi-hostpathplugin-m9sh2\" (UID: \"9831d515-5a42-4f3e-98a9-aca48b1093d8\") " pod="hostpath-provisioner/csi-hostpathplugin-m9sh2" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.071702 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cb397e19-ed97-4db5-a9a0-a8da0d3a55a5-config\") pod \"etcd-operator-b45778765-7hhsc\" (UID: \"cb397e19-ed97-4db5-a9a0-a8da0d3a55a5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-7hhsc" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.071742 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/2d27a376-92a0-462a-b9c1-70aea2ea58f2-certs\") pod \"machine-config-server-vp9nq\" (UID: \"2d27a376-92a0-462a-b9c1-70aea2ea58f2\") " pod="openshift-machine-config-operator/machine-config-server-vp9nq" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.071758 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/07f665ea-3510-48cc-bacf-b92172029dce-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-cwdx8\" (UID: \"07f665ea-3510-48cc-bacf-b92172029dce\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-cwdx8" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.071775 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gt9hb\" (UniqueName: \"kubernetes.io/projected/6e462a60-2c72-4fe1-a9a1-e9f2a5cf185d-kube-api-access-gt9hb\") pod \"packageserver-d55dfcdfc-dkwtw\" (UID: \"6e462a60-2c72-4fe1-a9a1-e9f2a5cf185d\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-dkwtw" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.071780 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4d4r2\" (UniqueName: \"kubernetes.io/projected/83010039-4d8d-46c5-9069-585653c86378-kube-api-access-4d4r2\") pod \"cluster-image-registry-operator-dc59b4c8b-kz2qx\" (UID: \"83010039-4d8d-46c5-9069-585653c86378\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-kz2qx" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.071857 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/cb397e19-ed97-4db5-a9a0-a8da0d3a55a5-etcd-ca\") pod \"etcd-operator-b45778765-7hhsc\" (UID: \"cb397e19-ed97-4db5-a9a0-a8da0d3a55a5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-7hhsc" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.071886 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/83010039-4d8d-46c5-9069-585653c86378-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-kz2qx\" (UID: \"83010039-4d8d-46c5-9069-585653c86378\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-kz2qx" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.071912 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/691beb9c-4f21-4efc-b00d-731786bc427d-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-zkglp\" (UID: \"691beb9c-4f21-4efc-b00d-731786bc427d\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-zkglp" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.071931 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/11c4ff79-c760-4e5d-8594-8dd82990dec0-stats-auth\") pod \"router-default-5444994796-xtvnd\" (UID: \"11c4ff79-c760-4e5d-8594-8dd82990dec0\") " pod="openshift-ingress/router-default-5444994796-xtvnd" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.071953 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gw28f\" (UniqueName: \"kubernetes.io/projected/11c4ff79-c760-4e5d-8594-8dd82990dec0-kube-api-access-gw28f\") pod \"router-default-5444994796-xtvnd\" (UID: \"11c4ff79-c760-4e5d-8594-8dd82990dec0\") " pod="openshift-ingress/router-default-5444994796-xtvnd" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.071971 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fwvjq\" (UniqueName: \"kubernetes.io/projected/9831d515-5a42-4f3e-98a9-aca48b1093d8-kube-api-access-fwvjq\") pod \"csi-hostpathplugin-m9sh2\" (UID: \"9831d515-5a42-4f3e-98a9-aca48b1093d8\") " pod="hostpath-provisioner/csi-hostpathplugin-m9sh2" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.071990 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/9a307988-ced0-4f7b-b6d2-e981c88eabd7-metrics-tls\") pod \"dns-operator-744455d44c-sjkkc\" (UID: \"9a307988-ced0-4f7b-b6d2-e981c88eabd7\") " pod="openshift-dns-operator/dns-operator-744455d44c-sjkkc" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.072008 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s8r4r\" (UniqueName: \"kubernetes.io/projected/bd4b30ef-c583-4142-874c-806c9038a954-kube-api-access-s8r4r\") pod \"dns-default-sh5l7\" (UID: \"bd4b30ef-c583-4142-874c-806c9038a954\") " pod="openshift-dns/dns-default-sh5l7" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.072049 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/49b1e180-71ea-45cf-bc4f-4b2283aa366e-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-bbgxs\" (UID: \"49b1e180-71ea-45cf-bc4f-4b2283aa366e\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-bbgxs" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.072088 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/9831d515-5a42-4f3e-98a9-aca48b1093d8-socket-dir\") pod \"csi-hostpathplugin-m9sh2\" (UID: \"9831d515-5a42-4f3e-98a9-aca48b1093d8\") " 
pod="hostpath-provisioner/csi-hostpathplugin-m9sh2" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.072111 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/9831d515-5a42-4f3e-98a9-aca48b1093d8-csi-data-dir\") pod \"csi-hostpathplugin-m9sh2\" (UID: \"9831d515-5a42-4f3e-98a9-aca48b1093d8\") " pod="hostpath-provisioner/csi-hostpathplugin-m9sh2" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.072137 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-556st\" (UID: \"ae0b7e23-ca18-4adc-aa3a-551c273d45af\") " pod="openshift-image-registry/image-registry-697d97f7c8-556st" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.072165 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/83010039-4d8d-46c5-9069-585653c86378-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-kz2qx\" (UID: \"83010039-4d8d-46c5-9069-585653c86378\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-kz2qx" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.072199 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/49b1e180-71ea-45cf-bc4f-4b2283aa366e-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-bbgxs\" (UID: \"49b1e180-71ea-45cf-bc4f-4b2283aa366e\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-bbgxs" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.072215 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/cb397e19-ed97-4db5-a9a0-a8da0d3a55a5-etcd-client\") pod \"etcd-operator-b45778765-7hhsc\" (UID: \"cb397e19-ed97-4db5-a9a0-a8da0d3a55a5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-7hhsc" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.074056 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/34937a53-6292-4363-ad11-9d5e180b13b5-trusted-ca\") pod \"ingress-operator-5b745b69d9-5lrds\" (UID: \"34937a53-6292-4363-ad11-9d5e180b13b5\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-5lrds" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.066301 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/11c4ff79-c760-4e5d-8594-8dd82990dec0-service-ca-bundle\") pod \"router-default-5444994796-xtvnd\" (UID: \"11c4ff79-c760-4e5d-8594-8dd82990dec0\") " pod="openshift-ingress/router-default-5444994796-xtvnd" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.068235 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29422845-kdjmt"] Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.068262 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/07f665ea-3510-48cc-bacf-b92172029dce-service-ca\") pod \"cluster-version-operator-5c965bbfc6-cwdx8\" (UID: \"07f665ea-3510-48cc-bacf-b92172029dce\") " 
pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-cwdx8" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.065570 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/9831d515-5a42-4f3e-98a9-aca48b1093d8-registration-dir\") pod \"csi-hostpathplugin-m9sh2\" (UID: \"9831d515-5a42-4f3e-98a9-aca48b1093d8\") " pod="hostpath-provisioner/csi-hostpathplugin-m9sh2" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.076531 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/9a307988-ced0-4f7b-b6d2-e981c88eabd7-metrics-tls\") pod \"dns-operator-744455d44c-sjkkc\" (UID: \"9a307988-ced0-4f7b-b6d2-e981c88eabd7\") " pod="openshift-dns-operator/dns-operator-744455d44c-sjkkc" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.076558 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/07f665ea-3510-48cc-bacf-b92172029dce-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-cwdx8\" (UID: \"07f665ea-3510-48cc-bacf-b92172029dce\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-cwdx8" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.077892 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cb397e19-ed97-4db5-a9a0-a8da0d3a55a5-config\") pod \"etcd-operator-b45778765-7hhsc\" (UID: \"cb397e19-ed97-4db5-a9a0-a8da0d3a55a5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-7hhsc" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.078046 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/9831d515-5a42-4f3e-98a9-aca48b1093d8-plugins-dir\") pod \"csi-hostpathplugin-m9sh2\" (UID: \"9831d515-5a42-4f3e-98a9-aca48b1093d8\") " pod="hostpath-provisioner/csi-hostpathplugin-m9sh2" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.079689 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/00babddd-5238-454a-92dc-ba2954d36d78-cert\") pod \"ingress-canary-78cdm\" (UID: \"00babddd-5238-454a-92dc-ba2954d36d78\") " pod="openshift-ingress-canary/ingress-canary-78cdm" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.080543 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/2d27a376-92a0-462a-b9c1-70aea2ea58f2-node-bootstrap-token\") pod \"machine-config-server-vp9nq\" (UID: \"2d27a376-92a0-462a-b9c1-70aea2ea58f2\") " pod="openshift-machine-config-operator/machine-config-server-vp9nq" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.081157 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/bd4b30ef-c583-4142-874c-806c9038a954-config-volume\") pod \"dns-default-sh5l7\" (UID: \"bd4b30ef-c583-4142-874c-806c9038a954\") " pod="openshift-dns/dns-default-sh5l7" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.083471 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/11c4ff79-c760-4e5d-8594-8dd82990dec0-stats-auth\") pod \"router-default-5444994796-xtvnd\" (UID: \"11c4ff79-c760-4e5d-8594-8dd82990dec0\") " pod="openshift-ingress/router-default-5444994796-xtvnd" Dec 10 
12:58:17 crc kubenswrapper[4921]: E1210 12:58:17.083885 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 12:58:17.583870049 +0000 UTC m=+94.800091973 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-556st" (UID: "ae0b7e23-ca18-4adc-aa3a-551c273d45af") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.084630 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/cb397e19-ed97-4db5-a9a0-a8da0d3a55a5-etcd-ca\") pod \"etcd-operator-b45778765-7hhsc\" (UID: \"cb397e19-ed97-4db5-a9a0-a8da0d3a55a5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-7hhsc" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.085937 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/83010039-4d8d-46c5-9069-585653c86378-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-kz2qx\" (UID: \"83010039-4d8d-46c5-9069-585653c86378\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-kz2qx" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.088801 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/07f665ea-3510-48cc-bacf-b92172029dce-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-cwdx8\" (UID: \"07f665ea-3510-48cc-bacf-b92172029dce\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-cwdx8" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.091058 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/83010039-4d8d-46c5-9069-585653c86378-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-kz2qx\" (UID: \"83010039-4d8d-46c5-9069-585653c86378\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-kz2qx" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.094276 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/49b1e180-71ea-45cf-bc4f-4b2283aa366e-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-bbgxs\" (UID: \"49b1e180-71ea-45cf-bc4f-4b2283aa366e\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-bbgxs" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.094447 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/cb397e19-ed97-4db5-a9a0-a8da0d3a55a5-etcd-service-ca\") pod \"etcd-operator-b45778765-7hhsc\" (UID: \"cb397e19-ed97-4db5-a9a0-a8da0d3a55a5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-7hhsc" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.094528 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/691beb9c-4f21-4efc-b00d-731786bc427d-config\") pod \"kube-controller-manager-operator-78b949d7b-zkglp\" (UID: \"691beb9c-4f21-4efc-b00d-731786bc427d\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-zkglp" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.095002 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/9831d515-5a42-4f3e-98a9-aca48b1093d8-csi-data-dir\") pod \"csi-hostpathplugin-m9sh2\" (UID: \"9831d515-5a42-4f3e-98a9-aca48b1093d8\") " pod="hostpath-provisioner/csi-hostpathplugin-m9sh2" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.095086 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/691beb9c-4f21-4efc-b00d-731786bc427d-config\") pod \"kube-controller-manager-operator-78b949d7b-zkglp\" (UID: \"691beb9c-4f21-4efc-b00d-731786bc427d\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-zkglp" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.095144 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/9831d515-5a42-4f3e-98a9-aca48b1093d8-socket-dir\") pod \"csi-hostpathplugin-m9sh2\" (UID: \"9831d515-5a42-4f3e-98a9-aca48b1093d8\") " pod="hostpath-provisioner/csi-hostpathplugin-m9sh2" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.095619 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/49b1e180-71ea-45cf-bc4f-4b2283aa366e-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-bbgxs\" (UID: \"49b1e180-71ea-45cf-bc4f-4b2283aa366e\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-bbgxs" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.095625 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/a24ccf53-2780-49dd-9f26-d80a42631230-available-featuregates\") pod \"openshift-config-operator-7777fb866f-dhxg9\" (UID: \"a24ccf53-2780-49dd-9f26-d80a42631230\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-dhxg9" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.095674 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5m2bn\" (UniqueName: \"kubernetes.io/projected/a24ccf53-2780-49dd-9f26-d80a42631230-kube-api-access-5m2bn\") pod \"openshift-config-operator-7777fb866f-dhxg9\" (UID: \"a24ccf53-2780-49dd-9f26-d80a42631230\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-dhxg9" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.095914 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/a24ccf53-2780-49dd-9f26-d80a42631230-available-featuregates\") pod \"openshift-config-operator-7777fb866f-dhxg9\" (UID: \"a24ccf53-2780-49dd-9f26-d80a42631230\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-dhxg9" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.096109 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/34937a53-6292-4363-ad11-9d5e180b13b5-metrics-tls\") pod \"ingress-operator-5b745b69d9-5lrds\" (UID: 
\"34937a53-6292-4363-ad11-9d5e180b13b5\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-5lrds" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.097758 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/2d27a376-92a0-462a-b9c1-70aea2ea58f2-certs\") pod \"machine-config-server-vp9nq\" (UID: \"2d27a376-92a0-462a-b9c1-70aea2ea58f2\") " pod="openshift-machine-config-operator/machine-config-server-vp9nq" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.101614 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/11c4ff79-c760-4e5d-8594-8dd82990dec0-metrics-certs\") pod \"router-default-5444994796-xtvnd\" (UID: \"11c4ff79-c760-4e5d-8594-8dd82990dec0\") " pod="openshift-ingress/router-default-5444994796-xtvnd" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.102973 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a24ccf53-2780-49dd-9f26-d80a42631230-serving-cert\") pod \"openshift-config-operator-7777fb866f-dhxg9\" (UID: \"a24ccf53-2780-49dd-9f26-d80a42631230\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-dhxg9" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.107555 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-jpzts"] Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.112604 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/11c4ff79-c760-4e5d-8594-8dd82990dec0-default-certificate\") pod \"router-default-5444994796-xtvnd\" (UID: \"11c4ff79-c760-4e5d-8594-8dd82990dec0\") " pod="openshift-ingress/router-default-5444994796-xtvnd" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.116097 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cb397e19-ed97-4db5-a9a0-a8da0d3a55a5-serving-cert\") pod \"etcd-operator-b45778765-7hhsc\" (UID: \"cb397e19-ed97-4db5-a9a0-a8da0d3a55a5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-7hhsc" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.116337 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/691beb9c-4f21-4efc-b00d-731786bc427d-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-zkglp\" (UID: \"691beb9c-4f21-4efc-b00d-731786bc427d\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-zkglp" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.116771 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/bd4b30ef-c583-4142-874c-806c9038a954-metrics-tls\") pod \"dns-default-sh5l7\" (UID: \"bd4b30ef-c583-4142-874c-806c9038a954\") " pod="openshift-dns/dns-default-sh5l7" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.117959 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/cb397e19-ed97-4db5-a9a0-a8da0d3a55a5-etcd-client\") pod \"etcd-operator-b45778765-7hhsc\" (UID: \"cb397e19-ed97-4db5-a9a0-a8da0d3a55a5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-7hhsc" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.118368 4921 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-prwth\" (UniqueName: \"kubernetes.io/projected/451a15e9-0c7d-4999-b37d-d255aa272b3a-kube-api-access-prwth\") pod \"machine-approver-56656f9798-nvgsl\" (UID: \"451a15e9-0c7d-4999-b37d-d255aa272b3a\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-nvgsl" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.120128 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/07f665ea-3510-48cc-bacf-b92172029dce-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-cwdx8\" (UID: \"07f665ea-3510-48cc-bacf-b92172029dce\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-cwdx8" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.123795 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hwcr7\" (UniqueName: \"kubernetes.io/projected/3b87905a-cfdd-4617-90e8-38eb01147070-kube-api-access-hwcr7\") pod \"olm-operator-6b444d44fb-mnz9t\" (UID: \"3b87905a-cfdd-4617-90e8-38eb01147070\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-mnz9t" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.142727 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-nvgsl" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.149438 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-6b7sj"] Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.150092 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tg59x\" (UniqueName: \"kubernetes.io/projected/ae0b7e23-ca18-4adc-aa3a-551c273d45af-kube-api-access-tg59x\") pod \"image-registry-697d97f7c8-556st\" (UID: \"ae0b7e23-ca18-4adc-aa3a-551c273d45af\") " pod="openshift-image-registry/image-registry-697d97f7c8-556st" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.150848 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-ccdh5" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.174038 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-dkwtw" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.190532 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-kxlr2" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.192123 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v7nzx\" (UniqueName: \"kubernetes.io/projected/34937a53-6292-4363-ad11-9d5e180b13b5-kube-api-access-v7nzx\") pod \"ingress-operator-5b745b69d9-5lrds\" (UID: \"34937a53-6292-4363-ad11-9d5e180b13b5\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-5lrds" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.200159 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ntdkt\" (UniqueName: \"kubernetes.io/projected/9a307988-ced0-4f7b-b6d2-e981c88eabd7-kube-api-access-ntdkt\") pod \"dns-operator-744455d44c-sjkkc\" (UID: \"9a307988-ced0-4f7b-b6d2-e981c88eabd7\") " pod="openshift-dns-operator/dns-operator-744455d44c-sjkkc" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.208148 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 12:58:17 crc kubenswrapper[4921]: E1210 12:58:17.209365 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 12:58:17.709347486 +0000 UTC m=+94.925569410 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.232190 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-46cbm\" (UniqueName: \"kubernetes.io/projected/49b1e180-71ea-45cf-bc4f-4b2283aa366e-kube-api-access-46cbm\") pod \"openshift-controller-manager-operator-756b6f6bc6-bbgxs\" (UID: \"49b1e180-71ea-45cf-bc4f-4b2283aa366e\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-bbgxs" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.241416 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-mnz9t" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.244912 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4d4r2\" (UniqueName: \"kubernetes.io/projected/83010039-4d8d-46c5-9069-585653c86378-kube-api-access-4d4r2\") pod \"cluster-image-registry-operator-dc59b4c8b-kz2qx\" (UID: \"83010039-4d8d-46c5-9069-585653c86378\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-kz2qx" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.247105 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-bbgxs" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.285938 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/07f665ea-3510-48cc-bacf-b92172029dce-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-cwdx8\" (UID: \"07f665ea-3510-48cc-bacf-b92172029dce\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-cwdx8" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.286749 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-sjkkc" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.302025 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4hgzt\" (UniqueName: \"kubernetes.io/projected/cb397e19-ed97-4db5-a9a0-a8da0d3a55a5-kube-api-access-4hgzt\") pod \"etcd-operator-b45778765-7hhsc\" (UID: \"cb397e19-ed97-4db5-a9a0-a8da0d3a55a5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-7hhsc" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.304166 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x9bc2\" (UniqueName: \"kubernetes.io/projected/00babddd-5238-454a-92dc-ba2954d36d78-kube-api-access-x9bc2\") pod \"ingress-canary-78cdm\" (UID: \"00babddd-5238-454a-92dc-ba2954d36d78\") " pod="openshift-ingress-canary/ingress-canary-78cdm" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.308460 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gw28f\" (UniqueName: \"kubernetes.io/projected/11c4ff79-c760-4e5d-8594-8dd82990dec0-kube-api-access-gw28f\") pod \"router-default-5444994796-xtvnd\" (UID: \"11c4ff79-c760-4e5d-8594-8dd82990dec0\") " pod="openshift-ingress/router-default-5444994796-xtvnd" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.313221 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-556st\" (UID: \"ae0b7e23-ca18-4adc-aa3a-551c273d45af\") " pod="openshift-image-registry/image-registry-697d97f7c8-556st" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.316362 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-78cdm" Dec 10 12:58:17 crc kubenswrapper[4921]: E1210 12:58:17.328278 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 12:58:17.828253981 +0000 UTC m=+95.044475905 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-556st" (UID: "ae0b7e23-ca18-4adc-aa3a-551c273d45af") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.367525 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-mtf22"] Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.369009 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-w8czv"] Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.371697 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/691beb9c-4f21-4efc-b00d-731786bc427d-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-zkglp\" (UID: \"691beb9c-4f21-4efc-b00d-731786bc427d\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-zkglp" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.387496 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fwvjq\" (UniqueName: \"kubernetes.io/projected/9831d515-5a42-4f3e-98a9-aca48b1093d8-kube-api-access-fwvjq\") pod \"csi-hostpathplugin-m9sh2\" (UID: \"9831d515-5a42-4f3e-98a9-aca48b1093d8\") " pod="hostpath-provisioner/csi-hostpathplugin-m9sh2" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.388371 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pl6k2\" (UniqueName: \"kubernetes.io/projected/2d27a376-92a0-462a-b9c1-70aea2ea58f2-kube-api-access-pl6k2\") pod \"machine-config-server-vp9nq\" (UID: \"2d27a376-92a0-462a-b9c1-70aea2ea58f2\") " pod="openshift-machine-config-operator/machine-config-server-vp9nq" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.423694 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 12:58:17 crc kubenswrapper[4921]: E1210 12:58:17.424733 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 12:58:17.924696151 +0000 UTC m=+95.140918075 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.426109 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/83010039-4d8d-46c5-9069-585653c86378-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-kz2qx\" (UID: \"83010039-4d8d-46c5-9069-585653c86378\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-kz2qx" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.437246 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s8r4r\" (UniqueName: \"kubernetes.io/projected/bd4b30ef-c583-4142-874c-806c9038a954-kube-api-access-s8r4r\") pod \"dns-default-sh5l7\" (UID: \"bd4b30ef-c583-4142-874c-806c9038a954\") " pod="openshift-dns/dns-default-sh5l7" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.438362 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/34937a53-6292-4363-ad11-9d5e180b13b5-bound-sa-token\") pod \"ingress-operator-5b745b69d9-5lrds\" (UID: \"34937a53-6292-4363-ad11-9d5e180b13b5\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-5lrds" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.482124 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5m2bn\" (UniqueName: \"kubernetes.io/projected/a24ccf53-2780-49dd-9f26-d80a42631230-kube-api-access-5m2bn\") pod \"openshift-config-operator-7777fb866f-dhxg9\" (UID: \"a24ccf53-2780-49dd-9f26-d80a42631230\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-dhxg9" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.527410 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-556st\" (UID: \"ae0b7e23-ca18-4adc-aa3a-551c273d45af\") " pod="openshift-image-registry/image-registry-697d97f7c8-556st" Dec 10 12:58:17 crc kubenswrapper[4921]: E1210 12:58:17.527730 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 12:58:18.027719775 +0000 UTC m=+95.243941699 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-556st" (UID: "ae0b7e23-ca18-4adc-aa3a-551c273d45af") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.539432 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-dhxg9" Dec 10 12:58:17 crc kubenswrapper[4921]: W1210 12:58:17.546253 4921 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod451a15e9_0c7d_4999_b37d_d255aa272b3a.slice/crio-635d74a2a466c3df6986fa270c222b5525357b98cbfa0c1c2b01b85ee6df6a80 WatchSource:0}: Error finding container 635d74a2a466c3df6986fa270c222b5525357b98cbfa0c1c2b01b85ee6df6a80: Status 404 returned error can't find the container with id 635d74a2a466c3df6986fa270c222b5525357b98cbfa0c1c2b01b85ee6df6a80 Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.555959 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-cgqtr"] Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.571246 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-kz2qx" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.574516 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-cwdx8" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.575908 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-xtvnd" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.577604 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-8fpc5"] Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.580825 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-zkglp" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.596794 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-7hhsc" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.612594 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-5lrds" Dec 10 12:58:17 crc kubenswrapper[4921]: W1210 12:58:17.621320 4921 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podddb1c1c4_6c20_4fab_ba30_4d74c96ad4f4.slice/crio-dca705fc788fda20e68980953c63795c048d119bc9a4d5aeef23287f86a5fb09 WatchSource:0}: Error finding container dca705fc788fda20e68980953c63795c048d119bc9a4d5aeef23287f86a5fb09: Status 404 returned error can't find the container with id dca705fc788fda20e68980953c63795c048d119bc9a4d5aeef23287f86a5fb09 Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.624129 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/dns-default-sh5l7" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.629608 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 12:58:17 crc kubenswrapper[4921]: E1210 12:58:17.630337 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 12:58:18.130317206 +0000 UTC m=+95.346539130 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.633325 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-vp9nq" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.649560 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-m9sh2" Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.731535 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-556st\" (UID: \"ae0b7e23-ca18-4adc-aa3a-551c273d45af\") " pod="openshift-image-registry/image-registry-697d97f7c8-556st" Dec 10 12:58:17 crc kubenswrapper[4921]: E1210 12:58:17.732037 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 12:58:18.232026873 +0000 UTC m=+95.448248787 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-556st" (UID: "ae0b7e23-ca18-4adc-aa3a-551c273d45af") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.839794 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 12:58:17 crc kubenswrapper[4921]: E1210 12:58:17.840357 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 12:58:18.340335043 +0000 UTC m=+95.556556967 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.890239 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-8pb7c"] Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.899740 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-55m5g" event={"ID":"03c59650-42ea-4995-ada6-f86eb8aed1de","Type":"ContainerStarted","Data":"042ea5734c535957713ddcb332676dcd6b76c8c1d834b028e928c5e0d8df1928"} Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.916191 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-cgqtr" event={"ID":"3f1deb2d-0d02-4085-8aba-d05024240e34","Type":"ContainerStarted","Data":"19899ee2475815dd58a3573834264ca70ff6d6a18b1c009971b9233fa6e8aa59"} Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.956804 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-qkh5b" event={"ID":"ed4e7481-d39e-45b7-98e6-8dceac35361c","Type":"ContainerStarted","Data":"d2956752de101cfd6bc75293757e14b1d86d75727861768869559e25c1f8428c"} Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.956917 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-556st\" (UID: \"ae0b7e23-ca18-4adc-aa3a-551c273d45af\") " pod="openshift-image-registry/image-registry-697d97f7c8-556st" Dec 10 12:58:17 crc kubenswrapper[4921]: E1210 12:58:17.957294 4921 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 12:58:18.457276353 +0000 UTC m=+95.673498477 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-556st" (UID: "ae0b7e23-ca18-4adc-aa3a-551c273d45af") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.969108 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-mtf22" event={"ID":"2e786864-e8b1-4a03-9327-14d389a5bc21","Type":"ContainerStarted","Data":"38b358b9451d96742aac5a769fa6d5bd3298c083faa8dcc5f43ab00441e87389"} Dec 10 12:58:17 crc kubenswrapper[4921]: I1210 12:58:17.999114 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-dbzbp"] Dec 10 12:58:18 crc kubenswrapper[4921]: I1210 12:58:18.003552 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-6b7sj" event={"ID":"76a2b163-6490-4d77-947f-c7333cb25129","Type":"ContainerStarted","Data":"f609d8ce5849361f05ade31d7217a26e308aa701e079403de4edc22dd3ded558"} Dec 10 12:58:18 crc kubenswrapper[4921]: I1210 12:58:18.020939 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-jpzts" event={"ID":"2b8366f4-8622-49dd-a0ab-d832fd34bccd","Type":"ContainerStarted","Data":"bd1635d04e96db51d1b9a4609c57683c1f582d1dddf1d3495eb0d2ab40790233"} Dec 10 12:58:18 crc kubenswrapper[4921]: I1210 12:58:18.060076 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 12:58:18 crc kubenswrapper[4921]: E1210 12:58:18.060644 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 12:58:18.560621635 +0000 UTC m=+95.776843559 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:58:18 crc kubenswrapper[4921]: I1210 12:58:18.105119 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-w8czv" event={"ID":"ddb1c1c4-6c20-4fab-ba30-4d74c96ad4f4","Type":"ContainerStarted","Data":"dca705fc788fda20e68980953c63795c048d119bc9a4d5aeef23287f86a5fb09"} Dec 10 12:58:18 crc kubenswrapper[4921]: I1210 12:58:18.162569 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-556st\" (UID: \"ae0b7e23-ca18-4adc-aa3a-551c273d45af\") " pod="openshift-image-registry/image-registry-697d97f7c8-556st" Dec 10 12:58:18 crc kubenswrapper[4921]: E1210 12:58:18.163015 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 12:58:18.66299827 +0000 UTC m=+95.879220194 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-556st" (UID: "ae0b7e23-ca18-4adc-aa3a-551c273d45af") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:58:18 crc kubenswrapper[4921]: I1210 12:58:18.171344 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-qq2ff" event={"ID":"617cf016-593a-4d56-b104-e450cd6368ee","Type":"ContainerStarted","Data":"3bec37279217c481a186a92e6d9bcbf0856967fe0637cd92010dc93d9c042cea"} Dec 10 12:58:18 crc kubenswrapper[4921]: I1210 12:58:18.196951 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29422845-kdjmt" event={"ID":"959e5098-562a-471a-9396-fed74ed113b5","Type":"ContainerStarted","Data":"59c448657088234167a192371822778dbeb991f6e1ab089a4dd5b2b5c2935a28"} Dec 10 12:58:18 crc kubenswrapper[4921]: I1210 12:58:18.200245 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-8fpc5" event={"ID":"4ae7e30d-338b-464e-baef-c7e304ff67d9","Type":"ContainerStarted","Data":"1f87632b15b8844433cec567f02042636a26249e09473d85f291cbb0f739f750"} Dec 10 12:58:18 crc kubenswrapper[4921]: I1210 12:58:18.208370 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-nvgsl" event={"ID":"451a15e9-0c7d-4999-b37d-d255aa272b3a","Type":"ContainerStarted","Data":"635d74a2a466c3df6986fa270c222b5525357b98cbfa0c1c2b01b85ee6df6a80"} Dec 10 12:58:18 crc kubenswrapper[4921]: W1210 12:58:18.216429 4921 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod07f665ea_3510_48cc_bacf_b92172029dce.slice/crio-0e5f461541b20d70199aa0d92ccd0790a62930dc8eeb9b38f8e102fc41bf3ed0 WatchSource:0}: Error finding container 0e5f461541b20d70199aa0d92ccd0790a62930dc8eeb9b38f8e102fc41bf3ed0: Status 404 returned error can't find the container with id 0e5f461541b20d70199aa0d92ccd0790a62930dc8eeb9b38f8e102fc41bf3ed0 Dec 10 12:58:18 crc kubenswrapper[4921]: I1210 12:58:18.218184 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-bx4bn" event={"ID":"5b553e21-002b-4905-8f32-6950d71db324","Type":"ContainerStarted","Data":"a04851b215532bcc9674889339c3da06f877ca32a72b6d1c4e5f8d490c36d976"} Dec 10 12:58:18 crc kubenswrapper[4921]: I1210 12:58:18.237207 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-rslp7"] Dec 10 12:58:18 crc kubenswrapper[4921]: I1210 12:58:18.263268 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jmkzr"] Dec 10 12:58:18 crc kubenswrapper[4921]: I1210 12:58:18.264841 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-kwm5b"] Dec 10 12:58:18 crc kubenswrapper[4921]: I1210 12:58:18.265295 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 12:58:18 crc kubenswrapper[4921]: E1210 12:58:18.265765 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 12:58:18.765731185 +0000 UTC m=+95.981953109 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:58:18 crc kubenswrapper[4921]: I1210 12:58:18.340601 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-7smdt"] Dec 10 12:58:18 crc kubenswrapper[4921]: W1210 12:58:18.367163 4921 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod11c4ff79_c760_4e5d_8594_8dd82990dec0.slice/crio-a2da58f280074f20ac44fc5e5779403dd487337ee591509b2aa89dbccab06b47 WatchSource:0}: Error finding container a2da58f280074f20ac44fc5e5779403dd487337ee591509b2aa89dbccab06b47: Status 404 returned error can't find the container with id a2da58f280074f20ac44fc5e5779403dd487337ee591509b2aa89dbccab06b47 Dec 10 12:58:18 crc kubenswrapper[4921]: I1210 12:58:18.368104 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-g7bns"] Dec 10 12:58:18 crc kubenswrapper[4921]: W1210 12:58:18.368355 4921 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod61feb33b_8fe3_4ab5_b3a2_df7db4172225.slice/crio-ad0c48418be17befccadcd9fcd5a29f012288e7fb2b54d07b1c99cec3a7a6aad WatchSource:0}: Error finding container ad0c48418be17befccadcd9fcd5a29f012288e7fb2b54d07b1c99cec3a7a6aad: Status 404 returned error can't find the container with id ad0c48418be17befccadcd9fcd5a29f012288e7fb2b54d07b1c99cec3a7a6aad Dec 10 12:58:18 crc kubenswrapper[4921]: W1210 12:58:18.388642 4921 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2d27a376_92a0_462a_b9c1_70aea2ea58f2.slice/crio-4ab5f3f5e83f60dda73baea47edf427317e18772ae8c3e997e5136711085abcf WatchSource:0}: Error finding container 4ab5f3f5e83f60dda73baea47edf427317e18772ae8c3e997e5136711085abcf: Status 404 returned error can't find the container with id 4ab5f3f5e83f60dda73baea47edf427317e18772ae8c3e997e5136711085abcf Dec 10 12:58:18 crc kubenswrapper[4921]: I1210 12:58:18.454916 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-556st\" (UID: \"ae0b7e23-ca18-4adc-aa3a-551c273d45af\") " pod="openshift-image-registry/image-registry-697d97f7c8-556st" Dec 10 12:58:18 crc kubenswrapper[4921]: I1210 12:58:18.458087 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-gwnkp"] Dec 10 12:58:18 crc kubenswrapper[4921]: E1210 12:58:18.460445 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 12:58:18.960428006 +0000 UTC m=+96.176649920 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-556st" (UID: "ae0b7e23-ca18-4adc-aa3a-551c273d45af") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:58:18 crc kubenswrapper[4921]: I1210 12:58:18.559019 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 12:58:18 crc kubenswrapper[4921]: E1210 12:58:18.559446 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 12:58:19.059421918 +0000 UTC m=+96.275643842 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:58:18 crc kubenswrapper[4921]: I1210 12:58:18.559956 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-556st\" (UID: \"ae0b7e23-ca18-4adc-aa3a-551c273d45af\") " pod="openshift-image-registry/image-registry-697d97f7c8-556st" Dec 10 12:58:18 crc kubenswrapper[4921]: E1210 12:58:18.562049 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 12:58:19.06202965 +0000 UTC m=+96.278251574 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-556st" (UID: "ae0b7e23-ca18-4adc-aa3a-551c273d45af") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:58:18 crc kubenswrapper[4921]: I1210 12:58:18.650143 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d8m4j" podStartSLOduration=71.650108838 podStartE2EDuration="1m11.650108838s" podCreationTimestamp="2025-12-10 12:57:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 12:58:18.616245027 +0000 UTC m=+95.832466971" watchObservedRunningTime="2025-12-10 12:58:18.650108838 +0000 UTC m=+95.866330762" Dec 10 12:58:18 crc kubenswrapper[4921]: I1210 12:58:18.662942 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 12:58:18 crc kubenswrapper[4921]: E1210 12:58:18.663250 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 12:58:19.163232283 +0000 UTC m=+96.379454207 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:58:18 crc kubenswrapper[4921]: I1210 12:58:18.684202 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-nddsp"] Dec 10 12:58:18 crc kubenswrapper[4921]: I1210 12:58:18.690345 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-dkwtw"] Dec 10 12:58:18 crc kubenswrapper[4921]: I1210 12:58:18.743249 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29422845-kdjmt" podStartSLOduration=71.743226066 podStartE2EDuration="1m11.743226066s" podCreationTimestamp="2025-12-10 12:57:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 12:58:18.718861299 +0000 UTC m=+95.935083233" watchObservedRunningTime="2025-12-10 12:58:18.743226066 +0000 UTC m=+95.959447980" Dec 10 12:58:18 crc kubenswrapper[4921]: I1210 12:58:18.745472 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-mnz9t"] Dec 10 12:58:18 crc kubenswrapper[4921]: I1210 12:58:18.746523 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-ccdh5"] Dec 10 12:58:18 crc kubenswrapper[4921]: I1210 12:58:18.765704 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-556st\" (UID: \"ae0b7e23-ca18-4adc-aa3a-551c273d45af\") " pod="openshift-image-registry/image-registry-697d97f7c8-556st" Dec 10 12:58:18 crc kubenswrapper[4921]: E1210 12:58:18.766100 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 12:58:19.266086121 +0000 UTC m=+96.482308045 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-556st" (UID: "ae0b7e23-ca18-4adc-aa3a-551c273d45af") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:58:18 crc kubenswrapper[4921]: I1210 12:58:18.782876 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-sjkkc"] Dec 10 12:58:18 crc kubenswrapper[4921]: I1210 12:58:18.827352 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-jwb9f" podStartSLOduration=71.827327103 podStartE2EDuration="1m11.827327103s" podCreationTimestamp="2025-12-10 12:57:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 12:58:18.814424215 +0000 UTC m=+96.030646149" watchObservedRunningTime="2025-12-10 12:58:18.827327103 +0000 UTC m=+96.043549037" Dec 10 12:58:18 crc kubenswrapper[4921]: I1210 12:58:18.868933 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 12:58:18 crc kubenswrapper[4921]: E1210 12:58:18.869558 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 12:58:19.369531846 +0000 UTC m=+96.585753770 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:58:18 crc kubenswrapper[4921]: I1210 12:58:18.878476 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-dhxg9"] Dec 10 12:58:18 crc kubenswrapper[4921]: I1210 12:58:18.970866 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-556st\" (UID: \"ae0b7e23-ca18-4adc-aa3a-551c273d45af\") " pod="openshift-image-registry/image-registry-697d97f7c8-556st" Dec 10 12:58:18 crc kubenswrapper[4921]: E1210 12:58:18.971469 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 12:58:19.471450179 +0000 UTC m=+96.687672103 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-556st" (UID: "ae0b7e23-ca18-4adc-aa3a-551c273d45af") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:58:19 crc kubenswrapper[4921]: I1210 12:58:19.075001 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 12:58:19 crc kubenswrapper[4921]: E1210 12:58:19.075643 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 12:58:19.575618284 +0000 UTC m=+96.791840208 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:58:19 crc kubenswrapper[4921]: I1210 12:58:19.107541 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-kxlr2"] Dec 10 12:58:19 crc kubenswrapper[4921]: I1210 12:58:19.115296 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-bbgxs"] Dec 10 12:58:19 crc kubenswrapper[4921]: I1210 12:58:19.159779 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-rgbkv"] Dec 10 12:58:19 crc kubenswrapper[4921]: I1210 12:58:19.205421 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-556st\" (UID: \"ae0b7e23-ca18-4adc-aa3a-551c273d45af\") " pod="openshift-image-registry/image-registry-697d97f7c8-556st" Dec 10 12:58:19 crc kubenswrapper[4921]: E1210 12:58:19.205897 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 12:58:19.705878613 +0000 UTC m=+96.922100537 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-556st" (UID: "ae0b7e23-ca18-4adc-aa3a-551c273d45af") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:58:19 crc kubenswrapper[4921]: I1210 12:58:19.314087 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 12:58:19 crc kubenswrapper[4921]: E1210 12:58:19.314527 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 12:58:19.814498142 +0000 UTC m=+97.030720066 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:58:19 crc kubenswrapper[4921]: I1210 12:58:19.317264 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-556st\" (UID: \"ae0b7e23-ca18-4adc-aa3a-551c273d45af\") " pod="openshift-image-registry/image-registry-697d97f7c8-556st" Dec 10 12:58:19 crc kubenswrapper[4921]: E1210 12:58:19.319422 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 12:58:19.819376467 +0000 UTC m=+97.035598391 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-556st" (UID: "ae0b7e23-ca18-4adc-aa3a-551c273d45af") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:58:19 crc kubenswrapper[4921]: I1210 12:58:19.334474 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-5f9l6"] Dec 10 12:58:19 crc kubenswrapper[4921]: I1210 12:58:19.334509 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-78cdm"] Dec 10 12:58:19 crc kubenswrapper[4921]: I1210 12:58:19.334524 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-7hhsc"] Dec 10 12:58:19 crc kubenswrapper[4921]: I1210 12:58:19.337272 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-w8czv" event={"ID":"ddb1c1c4-6c20-4fab-ba30-4d74c96ad4f4","Type":"ContainerStarted","Data":"ecc0740cdd35c77b7923d7c3feb049fa0f3f0798e4fda9c5fb3ba24708b6d3e5"} Dec 10 12:58:19 crc kubenswrapper[4921]: I1210 12:58:19.400914 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-w8czv" podStartSLOduration=72.400895303 podStartE2EDuration="1m12.400895303s" podCreationTimestamp="2025-12-10 12:57:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 12:58:19.37921114 +0000 UTC m=+96.595433074" watchObservedRunningTime="2025-12-10 12:58:19.400895303 +0000 UTC m=+96.617117227" Dec 10 12:58:19 crc kubenswrapper[4921]: I1210 12:58:19.402902 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-zkglp"] Dec 10 12:58:19 crc kubenswrapper[4921]: I1210 12:58:19.438457 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 12:58:19 crc kubenswrapper[4921]: E1210 12:58:19.438627 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 12:58:19.938599521 +0000 UTC m=+97.154821445 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:58:19 crc kubenswrapper[4921]: I1210 12:58:19.439055 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-556st\" (UID: \"ae0b7e23-ca18-4adc-aa3a-551c273d45af\") " pod="openshift-image-registry/image-registry-697d97f7c8-556st" Dec 10 12:58:19 crc kubenswrapper[4921]: E1210 12:58:19.439482 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 12:58:19.939473305 +0000 UTC m=+97.155695229 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-556st" (UID: "ae0b7e23-ca18-4adc-aa3a-551c273d45af") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:58:19 crc kubenswrapper[4921]: I1210 12:58:19.464491 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-vp9nq" event={"ID":"2d27a376-92a0-462a-b9c1-70aea2ea58f2","Type":"ContainerStarted","Data":"4ab5f3f5e83f60dda73baea47edf427317e18772ae8c3e997e5136711085abcf"} Dec 10 12:58:19 crc kubenswrapper[4921]: I1210 12:58:19.521317 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-dbzbp" event={"ID":"61feb33b-8fe3-4ab5-b3a2-df7db4172225","Type":"ContainerStarted","Data":"ad0c48418be17befccadcd9fcd5a29f012288e7fb2b54d07b1c99cec3a7a6aad"} Dec 10 12:58:19 crc kubenswrapper[4921]: I1210 12:58:19.549739 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 12:58:19 crc kubenswrapper[4921]: E1210 12:58:19.550113 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 12:58:20.050097949 +0000 UTC m=+97.266319873 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:58:19 crc kubenswrapper[4921]: I1210 12:58:19.571015 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-g7bns" event={"ID":"00792261-f23c-4fc8-a67b-4b7753b692a1","Type":"ContainerStarted","Data":"9c921519cb2d1983857f71f2fc56ec67bec1d99fc910d5a34c456ee8e32fe4d4"} Dec 10 12:58:19 crc kubenswrapper[4921]: I1210 12:58:19.603125 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-sh5l7"] Dec 10 12:58:19 crc kubenswrapper[4921]: I1210 12:58:19.631065 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-5lrds"] Dec 10 12:58:19 crc kubenswrapper[4921]: I1210 12:58:19.643445 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-mtf22" event={"ID":"2e786864-e8b1-4a03-9327-14d389a5bc21","Type":"ContainerStarted","Data":"9d64276a674c6a2d99dfe9285c7d9e37d416852678303c6c1bd375d233ba3cbd"} Dec 10 12:58:19 crc kubenswrapper[4921]: I1210 12:58:19.645872 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-mtf22" Dec 10 12:58:19 crc kubenswrapper[4921]: I1210 12:58:19.653459 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-556st\" (UID: \"ae0b7e23-ca18-4adc-aa3a-551c273d45af\") " pod="openshift-image-registry/image-registry-697d97f7c8-556st" Dec 10 12:58:19 crc kubenswrapper[4921]: E1210 12:58:19.655180 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 12:58:20.155166029 +0000 UTC m=+97.371387953 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-556st" (UID: "ae0b7e23-ca18-4adc-aa3a-551c273d45af") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:58:19 crc kubenswrapper[4921]: I1210 12:58:19.666669 4921 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-mtf22 container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.25:8443/healthz\": dial tcp 10.217.0.25:8443: connect: connection refused" start-of-body= Dec 10 12:58:19 crc kubenswrapper[4921]: I1210 12:58:19.666882 4921 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-mtf22" podUID="2e786864-e8b1-4a03-9327-14d389a5bc21" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.25:8443/healthz\": dial tcp 10.217.0.25:8443: connect: connection refused" Dec 10 12:58:19 crc kubenswrapper[4921]: I1210 12:58:19.677529 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-mtf22" podStartSLOduration=72.677513021 podStartE2EDuration="1m12.677513021s" podCreationTimestamp="2025-12-10 12:57:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 12:58:19.675296359 +0000 UTC m=+96.891518293" watchObservedRunningTime="2025-12-10 12:58:19.677513021 +0000 UTC m=+96.893734965" Dec 10 12:58:19 crc kubenswrapper[4921]: I1210 12:58:19.705539 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-8fpc5" event={"ID":"4ae7e30d-338b-464e-baef-c7e304ff67d9","Type":"ContainerStarted","Data":"3d17ffe7d4ed8b364187264dd9e9d9cda36e7be2c84d8330a09561ac4aff8acd"} Dec 10 12:58:19 crc kubenswrapper[4921]: I1210 12:58:19.734259 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-nddsp" event={"ID":"1172878a-36b0-44ca-9df0-7eff88ebadfd","Type":"ContainerStarted","Data":"4dd027860e671af5349a557c05d0626d14271873c54c5cccd3b2c04069c063e7"} Dec 10 12:58:19 crc kubenswrapper[4921]: I1210 12:58:19.765238 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 12:58:19 crc kubenswrapper[4921]: E1210 12:58:19.766637 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 12:58:20.266603587 +0000 UTC m=+97.482825511 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:58:19 crc kubenswrapper[4921]: I1210 12:58:19.766921 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-556st\" (UID: \"ae0b7e23-ca18-4adc-aa3a-551c273d45af\") " pod="openshift-image-registry/image-registry-697d97f7c8-556st" Dec 10 12:58:19 crc kubenswrapper[4921]: E1210 12:58:19.767231 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 12:58:20.267224354 +0000 UTC m=+97.483446278 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-556st" (UID: "ae0b7e23-ca18-4adc-aa3a-551c273d45af") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:58:19 crc kubenswrapper[4921]: I1210 12:58:19.784053 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-sjkkc" event={"ID":"9a307988-ced0-4f7b-b6d2-e981c88eabd7","Type":"ContainerStarted","Data":"5aef6c59ba567de9eedf355d2a9d754026e0bb0277b2a97f9c50805a839ceec8"} Dec 10 12:58:19 crc kubenswrapper[4921]: W1210 12:58:19.784957 4921 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod34937a53_6292_4363_ad11_9d5e180b13b5.slice/crio-3afb065da24d3a37bdfeaa3ea4268b57abdd5eadeaa3b0cec293a69912337885 WatchSource:0}: Error finding container 3afb065da24d3a37bdfeaa3ea4268b57abdd5eadeaa3b0cec293a69912337885: Status 404 returned error can't find the container with id 3afb065da24d3a37bdfeaa3ea4268b57abdd5eadeaa3b0cec293a69912337885 Dec 10 12:58:19 crc kubenswrapper[4921]: I1210 12:58:19.810298 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29422845-kdjmt" event={"ID":"959e5098-562a-471a-9396-fed74ed113b5","Type":"ContainerStarted","Data":"7c7a78489563e20e664463b70c0da0356f4377687c580f0b90944d9ce6ef68a5"} Dec 10 12:58:19 crc kubenswrapper[4921]: I1210 12:58:19.834325 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-m9sh2"] Dec 10 12:58:19 crc kubenswrapper[4921]: I1210 12:58:19.839382 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-gwnkp" event={"ID":"9ab094b2-78dc-4ee6-b563-a1ae064588cf","Type":"ContainerStarted","Data":"708ceaed6929037dd5a0bf29e1d6c98c53678ce0e91e26d0085f545b17bc8e36"} Dec 10 12:58:19 crc kubenswrapper[4921]: I1210 12:58:19.841191 4921 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-gwnkp" Dec 10 12:58:19 crc kubenswrapper[4921]: I1210 12:58:19.845509 4921 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-gwnkp container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.15:8080/healthz\": dial tcp 10.217.0.15:8080: connect: connection refused" start-of-body= Dec 10 12:58:19 crc kubenswrapper[4921]: I1210 12:58:19.845671 4921 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-gwnkp" podUID="9ab094b2-78dc-4ee6-b563-a1ae064588cf" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.15:8080/healthz\": dial tcp 10.217.0.15:8080: connect: connection refused" Dec 10 12:58:19 crc kubenswrapper[4921]: I1210 12:58:19.869536 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 12:58:19 crc kubenswrapper[4921]: E1210 12:58:19.869917 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 12:58:20.369901277 +0000 UTC m=+97.586123191 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:58:19 crc kubenswrapper[4921]: I1210 12:58:19.895802 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-8pb7c" event={"ID":"ad2115a5-1371-4a19-b1e8-7f93a7719a71","Type":"ContainerStarted","Data":"99d3ce77f0b6878db425aa4b744ba0ad1c20c07ff8e7c42826edf00826dea06c"} Dec 10 12:58:19 crc kubenswrapper[4921]: I1210 12:58:19.924099 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-gwnkp" podStartSLOduration=72.924078973 podStartE2EDuration="1m12.924078973s" podCreationTimestamp="2025-12-10 12:57:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 12:58:19.875194454 +0000 UTC m=+97.091416378" watchObservedRunningTime="2025-12-10 12:58:19.924078973 +0000 UTC m=+97.140300887" Dec 10 12:58:19 crc kubenswrapper[4921]: I1210 12:58:19.925170 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-kz2qx"] Dec 10 12:58:19 crc kubenswrapper[4921]: I1210 12:58:19.935362 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-55m5g" event={"ID":"03c59650-42ea-4995-ada6-f86eb8aed1de","Type":"ContainerStarted","Data":"5e2fd9f465095a797730b4e59276447b63b2643244e3aff28570797a377679f5"} Dec 10 
12:58:19 crc kubenswrapper[4921]: I1210 12:58:19.936577 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-55m5g" Dec 10 12:58:19 crc kubenswrapper[4921]: I1210 12:58:19.954785 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-dhxg9" event={"ID":"a24ccf53-2780-49dd-9f26-d80a42631230","Type":"ContainerStarted","Data":"7e329751a5ee3cf95338d4ca4e172b1230c48b58161ad910f05c4c2f5d904836"} Dec 10 12:58:19 crc kubenswrapper[4921]: I1210 12:58:19.963656 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-55m5g" Dec 10 12:58:19 crc kubenswrapper[4921]: I1210 12:58:19.970708 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-556st\" (UID: \"ae0b7e23-ca18-4adc-aa3a-551c273d45af\") " pod="openshift-image-registry/image-registry-697d97f7c8-556st" Dec 10 12:58:19 crc kubenswrapper[4921]: I1210 12:58:19.974187 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-55m5g" podStartSLOduration=71.974171595 podStartE2EDuration="1m11.974171595s" podCreationTimestamp="2025-12-10 12:57:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 12:58:19.973646611 +0000 UTC m=+97.189868555" watchObservedRunningTime="2025-12-10 12:58:19.974171595 +0000 UTC m=+97.190393519" Dec 10 12:58:19 crc kubenswrapper[4921]: E1210 12:58:19.980148 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 12:58:20.480134371 +0000 UTC m=+97.696356295 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-556st" (UID: "ae0b7e23-ca18-4adc-aa3a-551c273d45af") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 12:58:19 crc kubenswrapper[4921]: I1210 12:58:19.974341 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-8pb7c" podStartSLOduration=72.97433634 podStartE2EDuration="1m12.97433634s" podCreationTimestamp="2025-12-10 12:57:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 12:58:19.925136163 +0000 UTC m=+97.141358087" watchObservedRunningTime="2025-12-10 12:58:19.97433634 +0000 UTC m=+97.190558264"
Dec 10 12:58:20 crc kubenswrapper[4921]: I1210 12:58:20.060758 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-qkh5b" event={"ID":"ed4e7481-d39e-45b7-98e6-8dceac35361c","Type":"ContainerStarted","Data":"5453bd2e077830f8b19a1703b79d1a2f0c60c8cb52d7c5c3a4845373c55b342d"}
Dec 10 12:58:20 crc kubenswrapper[4921]: I1210 12:58:20.075725 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 10 12:58:20 crc kubenswrapper[4921]: E1210 12:58:20.076177 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 12:58:20.57615069 +0000 UTC m=+97.792372614 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 12:58:20 crc kubenswrapper[4921]: I1210 12:58:20.076310 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-556st\" (UID: \"ae0b7e23-ca18-4adc-aa3a-551c273d45af\") " pod="openshift-image-registry/image-registry-697d97f7c8-556st"
Dec 10 12:58:20 crc kubenswrapper[4921]: E1210 12:58:20.078894 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 12:58:20.578874675 +0000 UTC m=+97.795096799 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-556st" (UID: "ae0b7e23-ca18-4adc-aa3a-551c273d45af") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 12:58:20 crc kubenswrapper[4921]: I1210 12:58:20.090052 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-mnz9t" event={"ID":"3b87905a-cfdd-4617-90e8-38eb01147070","Type":"ContainerStarted","Data":"2b009fd71da37d3d02480a80a7fc13ece702c98005ba1e6756f1bc3aef9f2022"}
Dec 10 12:58:20 crc kubenswrapper[4921]: I1210 12:58:20.143624 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-jpzts" event={"ID":"2b8366f4-8622-49dd-a0ab-d832fd34bccd","Type":"ContainerStarted","Data":"6605642ea08b6c1b8cf8af8e9e1b8afc06b420f82c83b7920beca106ab8742c4"}
Dec 10 12:58:20 crc kubenswrapper[4921]: I1210 12:58:20.146126 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-ccdh5" event={"ID":"eb53e918-67d3-472c-a390-d9a4e8f44fee","Type":"ContainerStarted","Data":"da05fa1e9f8fd643b12d6f8f766fe896210cf78463f96be14d56892fed971dc1"}
Dec 10 12:58:20 crc kubenswrapper[4921]: I1210 12:58:20.160901 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-kwm5b" event={"ID":"abc0551e-9e8b-4466-9eea-1da60ead65bb","Type":"ContainerStarted","Data":"fbb2abde738949b4423517eee228a6ded03d9f12ff6f22f19cbc77be53f3bef8"}
Dec 10 12:58:20 crc kubenswrapper[4921]: I1210 12:58:20.166855 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-qkh5b" podStartSLOduration=72.16683194 podStartE2EDuration="1m12.16683194s" podCreationTimestamp="2025-12-10 12:57:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 12:58:20.104252501 +0000 UTC m=+97.320474425" watchObservedRunningTime="2025-12-10 12:58:20.16683194 +0000 UTC m=+97.383053864"
Dec 10 12:58:20 crc kubenswrapper[4921]: I1210 12:58:20.189893 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 10 12:58:20 crc kubenswrapper[4921]: E1210 12:58:20.193589 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 12:58:20.693548802 +0000 UTC m=+97.909770726 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 12:58:20 crc kubenswrapper[4921]: I1210 12:58:20.250756 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-xtvnd" event={"ID":"11c4ff79-c760-4e5d-8594-8dd82990dec0","Type":"ContainerStarted","Data":"a2da58f280074f20ac44fc5e5779403dd487337ee591509b2aa89dbccab06b47"}
Dec 10 12:58:20 crc kubenswrapper[4921]: I1210 12:58:20.267478 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d8m4j"
Dec 10 12:58:20 crc kubenswrapper[4921]: I1210 12:58:20.267817 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d8m4j"
Dec 10 12:58:20 crc kubenswrapper[4921]: I1210 12:58:20.269822 4921 generic.go:334] "Generic (PLEG): container finished" podID="617cf016-593a-4d56-b104-e450cd6368ee" containerID="ba4b75eb68f1c22dc0f85ebf947193ebdb0ca8ecf15243ff39980f659be439d6" exitCode=0
Dec 10 12:58:20 crc kubenswrapper[4921]: I1210 12:58:20.270550 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-qq2ff" event={"ID":"617cf016-593a-4d56-b104-e450cd6368ee","Type":"ContainerDied","Data":"ba4b75eb68f1c22dc0f85ebf947193ebdb0ca8ecf15243ff39980f659be439d6"}
Dec 10 12:58:20 crc kubenswrapper[4921]: I1210 12:58:20.287080 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d8m4j"
Dec 10 12:58:20 crc kubenswrapper[4921]: I1210 12:58:20.292099 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-jpzts" podStartSLOduration=73.29097612 podStartE2EDuration="1m13.29097612s" podCreationTimestamp="2025-12-10 12:57:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 12:58:20.174690928 +0000 UTC m=+97.390912862" watchObservedRunningTime="2025-12-10 12:58:20.29097612 +0000 UTC m=+97.507198034"
Dec 10 12:58:20 crc kubenswrapper[4921]: I1210 12:58:20.293362 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-556st\" (UID: \"ae0b7e23-ca18-4adc-aa3a-551c273d45af\") " pod="openshift-image-registry/image-registry-697d97f7c8-556st"
Dec 10 12:58:20 crc kubenswrapper[4921]: E1210 12:58:20.296948 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 12:58:20.796925705 +0000 UTC m=+98.013147629 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-556st" (UID: "ae0b7e23-ca18-4adc-aa3a-551c273d45af") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 12:58:20 crc kubenswrapper[4921]: I1210 12:58:20.333405 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-rslp7" event={"ID":"3c94527b-255d-4486-9a36-9fd0f7efd4d7","Type":"ContainerStarted","Data":"d3cd2f1c7bf47e0ea1613bcc1005e6584165404971f2367ff1f97a87c220da89"}
Dec 10 12:58:20 crc kubenswrapper[4921]: I1210 12:58:20.363368 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-cwdx8" event={"ID":"07f665ea-3510-48cc-bacf-b92172029dce","Type":"ContainerStarted","Data":"0e5f461541b20d70199aa0d92ccd0790a62930dc8eeb9b38f8e102fc41bf3ed0"}
Dec 10 12:58:20 crc kubenswrapper[4921]: I1210 12:58:20.382042 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-xtvnd" podStartSLOduration=73.38201103 podStartE2EDuration="1m13.38201103s" podCreationTimestamp="2025-12-10 12:57:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 12:58:20.288022548 +0000 UTC m=+97.504244502" watchObservedRunningTime="2025-12-10 12:58:20.38201103 +0000 UTC m=+97.598232954"
Dec 10 12:58:20 crc kubenswrapper[4921]: I1210 12:58:20.389067 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-dkwtw" event={"ID":"6e462a60-2c72-4fe1-a9a1-e9f2a5cf185d","Type":"ContainerStarted","Data":"60553dd57f06469655b9ab55f6fd735891e018a6a469562cbd43ba7c5ff33fe5"}
Dec 10 12:58:20 crc kubenswrapper[4921]: I1210 12:58:20.398846 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 10 12:58:20 crc kubenswrapper[4921]: E1210 12:58:20.399428 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 12:58:20.899408484 +0000 UTC m=+98.115630408 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 12:58:20 crc kubenswrapper[4921]: I1210 12:58:20.406100 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jmkzr" event={"ID":"b3ad786b-c5ca-47ba-8188-680f6c13192b","Type":"ContainerStarted","Data":"3f1c0f3f00bc72c84aaff2de1c2568f24750475afcc2aab07282aad494ba35df"}
Dec 10 12:58:20 crc kubenswrapper[4921]: I1210 12:58:20.407679 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-7smdt" event={"ID":"23dc36ea-02d4-493e-863b-56126c624ab0","Type":"ContainerStarted","Data":"0a089a5feda4841ee609731218e9e3e9d2667fc67d45472340bfba9bb131e0f5"}
Dec 10 12:58:20 crc kubenswrapper[4921]: I1210 12:58:20.408922 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-7smdt"
Dec 10 12:58:20 crc kubenswrapper[4921]: I1210 12:58:20.413843 4921 patch_prober.go:28] interesting pod/downloads-7954f5f757-7smdt container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.32:8080/\": dial tcp 10.217.0.32:8080: connect: connection refused" start-of-body=
Dec 10 12:58:20 crc kubenswrapper[4921]: I1210 12:58:20.413885 4921 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-7smdt" podUID="23dc36ea-02d4-493e-863b-56126c624ab0" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.32:8080/\": dial tcp 10.217.0.32:8080: connect: connection refused"
Dec 10 12:58:20 crc kubenswrapper[4921]: I1210 12:58:20.464081 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-6b7sj" event={"ID":"76a2b163-6490-4d77-947f-c7333cb25129","Type":"ContainerStarted","Data":"317164ac77aa61f36f53870111f2f1858c350bbc02a9036d6be454ed033e9e03"}
Dec 10 12:58:20 crc kubenswrapper[4921]: I1210 12:58:20.500773 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-556st\" (UID: \"ae0b7e23-ca18-4adc-aa3a-551c273d45af\") " pod="openshift-image-registry/image-registry-697d97f7c8-556st"
Dec 10 12:58:20 crc kubenswrapper[4921]: E1210 12:58:20.502496 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 12:58:21.002474668 +0000 UTC m=+98.218696592 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-556st" (UID: "ae0b7e23-ca18-4adc-aa3a-551c273d45af") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 12:58:20 crc kubenswrapper[4921]: I1210 12:58:20.519135 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-7smdt" podStartSLOduration=73.5191112 podStartE2EDuration="1m13.5191112s" podCreationTimestamp="2025-12-10 12:57:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 12:58:20.516812847 +0000 UTC m=+97.733034771" watchObservedRunningTime="2025-12-10 12:58:20.5191112 +0000 UTC m=+97.735333134"
Dec 10 12:58:20 crc kubenswrapper[4921]: I1210 12:58:20.525113 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-bx4bn" event={"ID":"5b553e21-002b-4905-8f32-6950d71db324","Type":"ContainerStarted","Data":"3869e8b5905b9c1c58ce720a4386939d7fbc4d3ab37418b9b11d33fdcb8617b5"}
Dec 10 12:58:20 crc kubenswrapper[4921]: I1210 12:58:20.571901 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d8m4j"
Dec 10 12:58:20 crc kubenswrapper[4921]: I1210 12:58:20.583747 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-xtvnd"
Dec 10 12:58:20 crc kubenswrapper[4921]: I1210 12:58:20.599747 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-cwdx8" podStartSLOduration=73.599719511 podStartE2EDuration="1m13.599719511s" podCreationTimestamp="2025-12-10 12:57:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 12:58:20.597752626 +0000 UTC m=+97.813974550" watchObservedRunningTime="2025-12-10 12:58:20.599719511 +0000 UTC m=+97.815941445"
Dec 10 12:58:20 crc kubenswrapper[4921]: I1210 12:58:20.600680 4921 patch_prober.go:28] interesting pod/router-default-5444994796-xtvnd container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 10 12:58:20 crc kubenswrapper[4921]: [-]has-synced failed: reason withheld
Dec 10 12:58:20 crc kubenswrapper[4921]: [+]process-running ok
Dec 10 12:58:20 crc kubenswrapper[4921]: healthz check failed
Dec 10 12:58:20 crc kubenswrapper[4921]: I1210 12:58:20.600755 4921 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-xtvnd" podUID="11c4ff79-c760-4e5d-8594-8dd82990dec0" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 10 12:58:20 crc kubenswrapper[4921]: I1210 12:58:20.602101 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 10 12:58:20 crc kubenswrapper[4921]: E1210 12:58:20.603447 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 12:58:21.103433704 +0000 UTC m=+98.319655628 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 12:58:20 crc kubenswrapper[4921]: I1210 12:58:20.714586 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-556st\" (UID: \"ae0b7e23-ca18-4adc-aa3a-551c273d45af\") " pod="openshift-image-registry/image-registry-697d97f7c8-556st"
Dec 10 12:58:20 crc kubenswrapper[4921]: E1210 12:58:20.717083 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 12:58:21.217071722 +0000 UTC m=+98.433293646 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-556st" (UID: "ae0b7e23-ca18-4adc-aa3a-551c273d45af") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 12:58:20 crc kubenswrapper[4921]: I1210 12:58:20.820722 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-bx4bn" podStartSLOduration=73.820702252 podStartE2EDuration="1m13.820702252s" podCreationTimestamp="2025-12-10 12:57:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 12:58:20.695158863 +0000 UTC m=+97.911380777" watchObservedRunningTime="2025-12-10 12:58:20.820702252 +0000 UTC m=+98.036924176"
Dec 10 12:58:20 crc kubenswrapper[4921]: I1210 12:58:20.828093 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 10 12:58:20 crc kubenswrapper[4921]: E1210 12:58:20.828629 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 12:58:21.328610492 +0000 UTC m=+98.544832416 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 12:58:20 crc kubenswrapper[4921]: I1210 12:58:20.930612 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-556st\" (UID: \"ae0b7e23-ca18-4adc-aa3a-551c273d45af\") " pod="openshift-image-registry/image-registry-697d97f7c8-556st"
Dec 10 12:58:20 crc kubenswrapper[4921]: E1210 12:58:20.931326 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 12:58:21.431315266 +0000 UTC m=+98.647537190 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-556st" (UID: "ae0b7e23-ca18-4adc-aa3a-551c273d45af") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 12:58:21 crc kubenswrapper[4921]: I1210 12:58:21.032227 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 10 12:58:21 crc kubenswrapper[4921]: E1210 12:58:21.032783 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 12:58:21.532765976 +0000 UTC m=+98.748987900 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 12:58:21 crc kubenswrapper[4921]: I1210 12:58:21.139649 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-556st\" (UID: \"ae0b7e23-ca18-4adc-aa3a-551c273d45af\") " pod="openshift-image-registry/image-registry-697d97f7c8-556st"
Dec 10 12:58:21 crc kubenswrapper[4921]: E1210 12:58:21.140164 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 12:58:21.64014989 +0000 UTC m=+98.856371814 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-556st" (UID: "ae0b7e23-ca18-4adc-aa3a-551c273d45af") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 12:58:21 crc kubenswrapper[4921]: I1210 12:58:21.241024 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 10 12:58:21 crc kubenswrapper[4921]: E1210 12:58:21.241404 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 12:58:21.741373264 +0000 UTC m=+98.957595188 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 12:58:21 crc kubenswrapper[4921]: I1210 12:58:21.344721 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-556st\" (UID: \"ae0b7e23-ca18-4adc-aa3a-551c273d45af\") " pod="openshift-image-registry/image-registry-697d97f7c8-556st"
Dec 10 12:58:21 crc kubenswrapper[4921]: E1210 12:58:21.345363 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 12:58:21.845351393 +0000 UTC m=+99.061573307 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-556st" (UID: "ae0b7e23-ca18-4adc-aa3a-551c273d45af") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 12:58:21 crc kubenswrapper[4921]: I1210 12:58:21.447566 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 10 12:58:21 crc kubenswrapper[4921]: E1210 12:58:21.448085 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 12:58:21.948060628 +0000 UTC m=+99.164282552 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 12:58:21 crc kubenswrapper[4921]: I1210 12:58:21.549937 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-556st\" (UID: \"ae0b7e23-ca18-4adc-aa3a-551c273d45af\") " pod="openshift-image-registry/image-registry-697d97f7c8-556st"
Dec 10 12:58:21 crc kubenswrapper[4921]: E1210 12:58:21.550491 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 12:58:22.050471594 +0000 UTC m=+99.266693518 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-556st" (UID: "ae0b7e23-ca18-4adc-aa3a-551c273d45af") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 12:58:21 crc kubenswrapper[4921]: I1210 12:58:21.594551 4921 patch_prober.go:28] interesting pod/router-default-5444994796-xtvnd container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 10 12:58:21 crc kubenswrapper[4921]: [-]has-synced failed: reason withheld
Dec 10 12:58:21 crc kubenswrapper[4921]: [+]process-running ok
Dec 10 12:58:21 crc kubenswrapper[4921]: healthz check failed
Dec 10 12:58:21 crc kubenswrapper[4921]: I1210 12:58:21.594649 4921 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-xtvnd" podUID="11c4ff79-c760-4e5d-8594-8dd82990dec0" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 10 12:58:21 crc kubenswrapper[4921]: I1210 12:58:21.607674 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-5f9l6" event={"ID":"f0bed6a9-002b-40fc-89d5-031568b7e47c","Type":"ContainerStarted","Data":"5161c54ac013da31ce69b610b83f5b465d02de3086ec0ad7ee23d442a8badc05"}
Dec 10 12:58:21 crc kubenswrapper[4921]: I1210 12:58:21.607759 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-5f9l6" event={"ID":"f0bed6a9-002b-40fc-89d5-031568b7e47c","Type":"ContainerStarted","Data":"cfbb994235c7e859be2a0247858d26b861364ea81372a3d0025b505aa4fc4afc"}
Dec 10 12:58:21 crc kubenswrapper[4921]: I1210 12:58:21.634953 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-m9sh2" event={"ID":"9831d515-5a42-4f3e-98a9-aca48b1093d8","Type":"ContainerStarted","Data":"f0546c1f0add3b047c9f34d7cadad94fd4806302055da3f951e5abdd2a185ec4"}
Dec 10 12:58:21 crc kubenswrapper[4921]: I1210 12:58:21.649523 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-5f9l6" podStartSLOduration=74.649507247 podStartE2EDuration="1m14.649507247s" podCreationTimestamp="2025-12-10 12:57:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 12:58:21.648715265 +0000 UTC m=+98.864937219" watchObservedRunningTime="2025-12-10 12:58:21.649507247 +0000 UTC m=+98.865729171"
Dec 10 12:58:21 crc kubenswrapper[4921]: I1210 12:58:21.650309 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-6b7sj" podStartSLOduration=74.650304659 podStartE2EDuration="1m14.650304659s" podCreationTimestamp="2025-12-10 12:57:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 12:58:20.865533958 +0000 UTC m=+98.081755882" watchObservedRunningTime="2025-12-10 12:58:21.650304659 +0000 UTC m=+98.866526583"
Dec 10 12:58:21 crc kubenswrapper[4921]: I1210 12:58:21.650499 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 10 12:58:21 crc kubenswrapper[4921]: E1210 12:58:21.651328 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 12:58:22.151288926 +0000 UTC m=+99.367511010 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 12:58:21 crc kubenswrapper[4921]: I1210 12:58:21.679449 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-bx4bn" event={"ID":"5b553e21-002b-4905-8f32-6950d71db324","Type":"ContainerStarted","Data":"b84e26a4b77358a3dbb23490ab12bdf4d689b4747b18ca48ad42d0508635842e"}
Dec 10 12:58:21 crc kubenswrapper[4921]: I1210 12:58:21.705577 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-vp9nq" event={"ID":"2d27a376-92a0-462a-b9c1-70aea2ea58f2","Type":"ContainerStarted","Data":"221d31a613fef7abc481cca0da394fa8ec2f79af0b3ebaae7f68fbbd4c9e6a7c"}
Dec 10 12:58:21 crc kubenswrapper[4921]: I1210 12:58:21.734932 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-cwdx8" event={"ID":"07f665ea-3510-48cc-bacf-b92172029dce","Type":"ContainerStarted","Data":"ed59ee7290a5070dd14bffea623b827beec1f19b72afd3b0239d80cd1503889f"}
Dec 10 12:58:21 crc kubenswrapper[4921]: I1210 12:58:21.763958 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-vp9nq" podStartSLOduration=7.763933507 podStartE2EDuration="7.763933507s" podCreationTimestamp="2025-12-10 12:58:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 12:58:21.762900268 +0000 UTC m=+98.979122192" watchObservedRunningTime="2025-12-10 12:58:21.763933507 +0000 UTC m=+98.980155421"
Dec 10 12:58:21 crc kubenswrapper[4921]: I1210 12:58:21.764275 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-556st\" (UID: \"ae0b7e23-ca18-4adc-aa3a-551c273d45af\") " pod="openshift-image-registry/image-registry-697d97f7c8-556st"
Dec 10 12:58:21 crc kubenswrapper[4921]: E1210 12:58:21.766055 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 12:58:22.266033255 +0000 UTC m=+99.482255359 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-556st" (UID: "ae0b7e23-ca18-4adc-aa3a-551c273d45af") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 12:58:21 crc kubenswrapper[4921]: I1210 12:58:21.786128 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-8fpc5" event={"ID":"4ae7e30d-338b-464e-baef-c7e304ff67d9","Type":"ContainerStarted","Data":"5d71cef02623d9ccfd8dd8ba7514be12963feb6b79b88ebcc8e4c015a0501123"}
Dec 10 12:58:21 crc kubenswrapper[4921]: I1210 12:58:21.855675 4921 generic.go:334] "Generic (PLEG): container finished" podID="959e5098-562a-471a-9396-fed74ed113b5" containerID="7c7a78489563e20e664463b70c0da0356f4377687c580f0b90944d9ce6ef68a5" exitCode=0
Dec 10 12:58:21 crc kubenswrapper[4921]: I1210 12:58:21.855895 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29422845-kdjmt" event={"ID":"959e5098-562a-471a-9396-fed74ed113b5","Type":"ContainerDied","Data":"7c7a78489563e20e664463b70c0da0356f4377687c580f0b90944d9ce6ef68a5"}
Dec 10 12:58:21 crc kubenswrapper[4921]: I1210 12:58:21.869002 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 10 12:58:21 crc kubenswrapper[4921]: E1210 12:58:21.869505 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 12:58:22.369441929 +0000 UTC m=+99.585663863 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 12:58:21 crc kubenswrapper[4921]: I1210 12:58:21.880544 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-cgqtr" event={"ID":"3f1deb2d-0d02-4085-8aba-d05024240e34","Type":"ContainerStarted","Data":"649e4a43e1d1723b2f02340db0ac6ed6c55d6ad3c57c24a794156fa785d4875b"}
Dec 10 12:58:21 crc kubenswrapper[4921]: I1210 12:58:21.893775 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-kwm5b" event={"ID":"abc0551e-9e8b-4466-9eea-1da60ead65bb","Type":"ContainerStarted","Data":"48209f9b9b59087dfaa3a2de3c789bb60e49449f1a1d6d9fcc8d7e066391c646"}
Dec 10 12:58:21 crc kubenswrapper[4921]: I1210 12:58:21.906082 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-gwnkp" event={"ID":"9ab094b2-78dc-4ee6-b563-a1ae064588cf","Type":"ContainerStarted","Data":"62b5f1b9adb7a7d4bdd0d933c021bf203f10d9a2822e93985363200b7132b134"}
Dec 10 12:58:21 crc kubenswrapper[4921]: I1210 12:58:21.911733 4921 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-gwnkp container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.15:8080/healthz\": dial tcp 10.217.0.15:8080: connect: connection refused" start-of-body=
Dec 10 12:58:21 crc kubenswrapper[4921]: I1210 12:58:21.911801 4921 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-gwnkp" podUID="9ab094b2-78dc-4ee6-b563-a1ae064588cf" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.15:8080/healthz\": dial tcp 10.217.0.15:8080: connect: connection refused"
Dec 10 12:58:21 crc kubenswrapper[4921]: I1210 12:58:21.914153 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-8fpc5" podStartSLOduration=74.914122811 podStartE2EDuration="1m14.914122811s" podCreationTimestamp="2025-12-10 12:57:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 12:58:21.845914415 +0000 UTC m=+99.062136349" watchObservedRunningTime="2025-12-10 12:58:21.914122811 +0000 UTC m=+99.130344755"
Dec 10 12:58:21 crc kubenswrapper[4921]: I1210 12:58:21.929410 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-rslp7" event={"ID":"3c94527b-255d-4486-9a36-9fd0f7efd4d7","Type":"ContainerStarted","Data":"b4a364c96db00699836e78fb1170692b2a7e7e3e6fe513e3f331088be391b42b"}
Dec 10 12:58:21 crc kubenswrapper[4921]: I1210 12:58:21.958341 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-cgqtr" podStartSLOduration=74.958314219 podStartE2EDuration="1m14.958314219s" podCreationTimestamp="2025-12-10 12:57:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 12:58:21.957149797 +0000 UTC m=+99.173371821" watchObservedRunningTime="2025-12-10 12:58:21.958314219 +0000 UTC m=+99.174536143"
Dec 10 12:58:21 crc kubenswrapper[4921]: I1210 12:58:21.960995 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-8pb7c" event={"ID":"ad2115a5-1371-4a19-b1e8-7f93a7719a71","Type":"ContainerStarted","Data":"5385b19f4a74f6dbfbfc105a753913007b044a27376d986811760a2ff87984b7"}
Dec 10 12:58:21 crc kubenswrapper[4921]: I1210 12:58:21.969749 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-556st\" (UID: \"ae0b7e23-ca18-4adc-aa3a-551c273d45af\") " pod="openshift-image-registry/image-registry-697d97f7c8-556st"
Dec 10 12:58:21 crc kubenswrapper[4921]: E1210 12:58:21.970061 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 12:58:22.470048655 +0000 UTC m=+99.686270579 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-556st" (UID: "ae0b7e23-ca18-4adc-aa3a-551c273d45af") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 12:58:21 crc kubenswrapper[4921]: I1210 12:58:21.986827 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-zkglp" event={"ID":"691beb9c-4f21-4efc-b00d-731786bc427d","Type":"ContainerStarted","Data":"f1c261326fb7cef46e3ab860672ee7175b53c020abdccbdf230bb307cd692fd8"}
Dec 10 12:58:21 crc kubenswrapper[4921]: I1210 12:58:21.998250 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-rslp7" podStartSLOduration=74.998223348 podStartE2EDuration="1m14.998223348s" podCreationTimestamp="2025-12-10 12:57:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 12:58:21.995882953 +0000 UTC m=+99.212104887" watchObservedRunningTime="2025-12-10 12:58:21.998223348 +0000 UTC m=+99.214445262"
Dec 10 12:58:22 crc kubenswrapper[4921]: I1210 12:58:22.015876 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-nvgsl" event={"ID":"451a15e9-0c7d-4999-b37d-d255aa272b3a","Type":"ContainerStarted","Data":"361716d84a5e7219154e6eab330fa50185c6b475d73b3d7b8eb9ded422361d72"}
Dec 10 12:58:22 crc kubenswrapper[4921]: I1210 12:58:22.055260 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-rgbkv" event={"ID":"a509d814-6573-4bfd-92c7-8d15a2d47228","Type":"ContainerStarted","Data":"500feb0722a63654bf84513eae55eefdc2938e34ac47075ae6be39ffab528502"}
Dec 10 12:58:22 crc kubenswrapper[4921]: I1210 12:58:22.080870 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 10 12:58:22 crc kubenswrapper[4921]: E1210 12:58:22.082319 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 12:58:22.582293095 +0000 UTC m=+99.798515019 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 12:58:22 crc kubenswrapper[4921]: I1210 12:58:22.178708 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jmkzr" event={"ID":"b3ad786b-c5ca-47ba-8188-680f6c13192b","Type":"ContainerStarted","Data":"5b826800016c072d5fcf56075306de689fee1463986c70b1727ad98b4f9bc99e"}
Dec 10 12:58:22 crc kubenswrapper[4921]: I1210 12:58:22.182695 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-556st\" (UID: \"ae0b7e23-ca18-4adc-aa3a-551c273d45af\") " pod="openshift-image-registry/image-registry-697d97f7c8-556st"
Dec 10 12:58:22 crc kubenswrapper[4921]: E1210 12:58:22.185066 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 12:58:22.685051871 +0000 UTC m=+99.901273795 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-556st" (UID: "ae0b7e23-ca18-4adc-aa3a-551c273d45af") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 12:58:22 crc kubenswrapper[4921]: I1210 12:58:22.211990 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-dbzbp" event={"ID":"61feb33b-8fe3-4ab5-b3a2-df7db4172225","Type":"ContainerStarted","Data":"2b3f12ed3a70ee4a8711284ef1d22a0ed4aaf3b11e83d7bf7579c103af09eb80"}
Dec 10 12:58:22 crc kubenswrapper[4921]: I1210 12:58:22.212939 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-dbzbp"
Dec 10 12:58:22 crc kubenswrapper[4921]: I1210 12:58:22.215213 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-mnz9t" event={"ID":"3b87905a-cfdd-4617-90e8-38eb01147070","Type":"ContainerStarted","Data":"4a62ec9d74c244cc512d652fe536965a70da0ba29ffbd04cb82acd807a4773d8"}
Dec 10 12:58:22 crc kubenswrapper[4921]: I1210 12:58:22.216141 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-mnz9t"
Dec 10 12:58:22 crc kubenswrapper[4921]: I1210 12:58:22.237438 4921 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-mnz9t container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.21:8443/healthz\": dial tcp 10.217.0.21:8443: connect: connection refused" start-of-body=
Dec 10 12:58:22 crc kubenswrapper[4921]: I1210 12:58:22.237503 4921 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-mnz9t" podUID="3b87905a-cfdd-4617-90e8-38eb01147070" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.21:8443/healthz\": dial tcp 10.217.0.21:8443: connect: connection refused"
Dec 10 12:58:22 crc kubenswrapper[4921]: I1210 12:58:22.238360 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-6b7sj" event={"ID":"76a2b163-6490-4d77-947f-c7333cb25129","Type":"ContainerStarted","Data":"29d16d7798bb1d350b2868468bea781f57948d730e498596c5cd1b1de8e20a40"}
Dec 10 12:58:22 crc kubenswrapper[4921]: I1210 12:58:22.255066 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-sh5l7" event={"ID":"bd4b30ef-c583-4142-874c-806c9038a954","Type":"ContainerStarted","Data":"8995e30e7d018d9fdaa7111fe422051d30568658b173ffe5484f9d76daf57216"}
Dec 10 12:58:22 crc kubenswrapper[4921]: I1210 12:58:22.267194 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-nvgsl" podStartSLOduration=75.267177183 podStartE2EDuration="1m15.267177183s" podCreationTimestamp="2025-12-10 12:57:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 12:58:22.08355138 +0000 UTC m=+99.299773304" watchObservedRunningTime="2025-12-10 12:58:22.267177183 +0000 UTC m=+99.483399107"
Dec 10 12:58:22 crc kubenswrapper[4921]: I1210 12:58:22.286984 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-7smdt" event={"ID":"23dc36ea-02d4-493e-863b-56126c624ab0","Type":"ContainerStarted","Data":"b72cd28a6088bfc939bcc670fd3a2897c69e65638a9f6850746e9eabe62b9808"}
Dec 10 12:58:22 crc kubenswrapper[4921]: I1210 12:58:22.287797 4921 patch_prober.go:28] interesting pod/downloads-7954f5f757-7smdt container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.32:8080/\": dial tcp 10.217.0.32:8080: connect: connection refused" start-of-body=
Dec 10 12:58:22 crc kubenswrapper[4921]: I1210 12:58:22.287864 4921 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-7smdt" podUID="23dc36ea-02d4-493e-863b-56126c624ab0" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.32:8080/\": dial tcp 10.217.0.32:8080: connect: connection refused"
Dec 10 12:58:22 crc kubenswrapper[4921]: I1210 12:58:22.287879 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 10 12:58:22 crc kubenswrapper[4921]: E1210 12:58:22.288845 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 12:58:22.788789214 +0000 UTC m=+100.005011138 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 12:58:22 crc kubenswrapper[4921]: I1210 12:58:22.289741 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-556st\" (UID: \"ae0b7e23-ca18-4adc-aa3a-551c273d45af\") " pod="openshift-image-registry/image-registry-697d97f7c8-556st"
Dec 10 12:58:22 crc kubenswrapper[4921]: E1210 12:58:22.292808 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 12:58:22.792790805 +0000 UTC m=+100.009012729 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-556st" (UID: "ae0b7e23-ca18-4adc-aa3a-551c273d45af") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 12:58:22 crc kubenswrapper[4921]: I1210 12:58:22.339293 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-7hhsc" event={"ID":"cb397e19-ed97-4db5-a9a0-a8da0d3a55a5","Type":"ContainerStarted","Data":"6bdb72c07083a91055c8fba58072ef90cc467b6bd55712eeb8a1ef8e53594a59"}
Dec 10 12:58:22 crc kubenswrapper[4921]: I1210 12:58:22.361851 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-mnz9t" podStartSLOduration=75.361813283 podStartE2EDuration="1m15.361813283s" podCreationTimestamp="2025-12-10 12:57:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 12:58:22.35628916 +0000 UTC m=+99.572511094" watchObservedRunningTime="2025-12-10 12:58:22.361813283 +0000 UTC m=+99.578035207"
Dec 10 12:58:22 crc kubenswrapper[4921]: I1210 12:58:22.362203 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jmkzr" podStartSLOduration=75.362195704 podStartE2EDuration="1m15.362195704s" podCreationTimestamp="2025-12-10 12:57:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 12:58:22.27103819 +0000 UTC m=+99.487260114" watchObservedRunningTime="2025-12-10 12:58:22.362195704 +0000 UTC m=+99.578417628"
Dec 10 12:58:22 crc kubenswrapper[4921]: I1210 12:58:22.374879 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-nddsp" event={"ID":"1172878a-36b0-44ca-9df0-7eff88ebadfd","Type":"ContainerStarted","Data":"1f4298a20d4bacc0534f369e92e6e29ceeb2c2424331131ab71da5a85b593c07"}
Dec 10 12:58:22 crc kubenswrapper[4921]: I1210 12:58:22.387230 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-ccdh5" event={"ID":"eb53e918-67d3-472c-a390-d9a4e8f44fee","Type":"ContainerStarted","Data":"4fad1062b877afcd7817cd34573c2b9c80bd32165d51463adfd41bca5fa861aa"}
Dec 10 12:58:22 crc kubenswrapper[4921]: I1210 12:58:22.388905 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-ccdh5"
Dec 10 12:58:22 crc kubenswrapper[4921]: I1210 12:58:22.396442 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 10 12:58:22 crc kubenswrapper[4921]: E1210 12:58:22.401543 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 12:58:22.901507197 +0000 UTC m=+100.117729121 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 12:58:22 crc kubenswrapper[4921]: I1210 12:58:22.453870 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-g7bns" event={"ID":"00792261-f23c-4fc8-a67b-4b7753b692a1","Type":"ContainerStarted","Data":"ef43b92ec772a305c7fcfa5028895d26ac96848f2424ca4f8c07dfb0e76de90b"}
Dec 10 12:58:22 crc kubenswrapper[4921]: I1210 12:58:22.455331 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-g7bns"
Dec 10 12:58:22 crc kubenswrapper[4921]: I1210 12:58:22.455782 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-ccdh5"
Dec 10 12:58:22 crc kubenswrapper[4921]: I1210 12:58:22.498568 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-556st\" (UID: \"ae0b7e23-ca18-4adc-aa3a-551c273d45af\") " pod="openshift-image-registry/image-registry-697d97f7c8-556st"
Dec 10 12:58:22 crc kubenswrapper[4921]: E1210 12:58:22.500778 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 12:58:23.000760505 +0000 UTC m=+100.216982429 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-556st" (UID: "ae0b7e23-ca18-4adc-aa3a-551c273d45af") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 12:58:22 crc kubenswrapper[4921]: I1210 12:58:22.543146 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-kz2qx" event={"ID":"83010039-4d8d-46c5-9069-585653c86378","Type":"ContainerStarted","Data":"eceb783c24571e8f889e8c00aeecc97ccd870e73aaa1bb10ff8d724b7b376198"}
Dec 10 12:58:22 crc kubenswrapper[4921]: I1210 12:58:22.555374 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-kxlr2" event={"ID":"ea8b1b90-5a2a-4f0c-8fcb-c961b1ce0aa4","Type":"ContainerStarted","Data":"fce06f264a72e6e713a27e105e8c203ffa200ab299c6628fd3b8f84b20e9b2a7"}
Dec 10 12:58:22 crc kubenswrapper[4921]: I1210 12:58:22.587168 4921 patch_prober.go:28] interesting pod/router-default-5444994796-xtvnd container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 10 12:58:22 crc kubenswrapper[4921]: [-]has-synced failed: reason withheld
Dec 10 12:58:22 crc kubenswrapper[4921]: [+]process-running ok
Dec 10 12:58:22 crc kubenswrapper[4921]: healthz check failed
Dec 10 12:58:22 crc kubenswrapper[4921]: I1210 12:58:22.587240 4921 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-xtvnd" podUID="11c4ff79-c760-4e5d-8594-8dd82990dec0" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 10 12:58:22 crc kubenswrapper[4921]: I1210 12:58:22.593661 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-xtvnd" event={"ID":"11c4ff79-c760-4e5d-8594-8dd82990dec0","Type":"ContainerStarted","Data":"d6983a54bc7bce193f482b5536127cc42a0a532e0d33c4210f4a4a6e3eefc5ef"}
Dec 10 12:58:22 crc kubenswrapper[4921]: I1210 12:58:22.599879 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-dbzbp" podStartSLOduration=75.599854279 podStartE2EDuration="1m15.599854279s" podCreationTimestamp="2025-12-10 12:57:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 12:58:22.482901029 +0000 UTC m=+99.699122973" watchObservedRunningTime="2025-12-10 12:58:22.599854279 +0000 UTC m=+99.816076203"
Dec 10 12:58:22 crc kubenswrapper[4921]: I1210 12:58:22.601676 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 10 12:58:22 crc kubenswrapper[4921]: E1210 12:58:22.603640 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 12:58:23.103616454 +0000 UTC m=+100.319838378 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 12:58:22 crc kubenswrapper[4921]: I1210 12:58:22.617020 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-5lrds" event={"ID":"34937a53-6292-4363-ad11-9d5e180b13b5","Type":"ContainerStarted","Data":"3afb065da24d3a37bdfeaa3ea4268b57abdd5eadeaa3b0cec293a69912337885"}
Dec 10 12:58:22 crc kubenswrapper[4921]: I1210 12:58:22.647903 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-78cdm" event={"ID":"00babddd-5238-454a-92dc-ba2954d36d78","Type":"ContainerStarted","Data":"6a35bda49849cb02c7285eb2f605a666fe4bea1e420ebfef48e74808224de1d0"}
Dec 10 12:58:22 crc kubenswrapper[4921]: I1210 12:58:22.647976 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-78cdm" event={"ID":"00babddd-5238-454a-92dc-ba2954d36d78","Type":"ContainerStarted","Data":"80c7335076a2db695aae6fdd23ecd8a6f4eceb0c6de3643b163b249f97e69bc8"}
Dec 10 12:58:22 crc kubenswrapper[4921]: I1210 12:58:22.676519 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-dkwtw" event={"ID":"6e462a60-2c72-4fe1-a9a1-e9f2a5cf185d","Type":"ContainerStarted","Data":"fb503f0cb08ce89f46685b4ef9aa592c42d7fcad08710065b7a711fb31e938cb"}
Dec 10 12:58:22 crc kubenswrapper[4921]: I1210 12:58:22.677463 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-dkwtw"
Dec 10 12:58:22 crc kubenswrapper[4921]: I1210 12:58:22.688700 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-ccdh5" podStartSLOduration=75.688680518 podStartE2EDuration="1m15.688680518s" podCreationTimestamp="2025-12-10 12:57:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 12:58:22.687471704 +0000 UTC m=+99.903693648" watchObservedRunningTime="2025-12-10 12:58:22.688680518 +0000 UTC m=+99.904902442"
Dec 10 12:58:22 crc kubenswrapper[4921]: I1210 12:58:22.688861 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-7hhsc" podStartSLOduration=75.688857143 podStartE2EDuration="1m15.688857143s" podCreationTimestamp="2025-12-10 12:57:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 12:58:22.605838085 +0000 UTC m=+99.822060009" watchObservedRunningTime="2025-12-10 12:58:22.688857143 +0000 UTC m=+99.905079057"
Dec 10 12:58:22 crc kubenswrapper[4921]: I1210 12:58:22.691634 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-bbgxs" event={"ID":"49b1e180-71ea-45cf-bc4f-4b2283aa366e","Type":"ContainerStarted","Data":"386515a531306de6764945166398d96d69f21cccdbfd401cd322084a1cbf028c"}
Dec 10 12:58:22 crc kubenswrapper[4921]: I1210 12:58:22.703488 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-556st\" (UID: \"ae0b7e23-ca18-4adc-aa3a-551c273d45af\") " pod="openshift-image-registry/image-registry-697d97f7c8-556st"
Dec 10 12:58:22 crc kubenswrapper[4921]: E1210 12:58:22.705134 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 12:58:23.205114214 +0000 UTC m=+100.421336138 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-556st" (UID: "ae0b7e23-ca18-4adc-aa3a-551c273d45af") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 12:58:22 crc kubenswrapper[4921]: I1210 12:58:22.705270 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-mtf22"
Dec 10 12:58:22 crc kubenswrapper[4921]: I1210 12:58:22.786016 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-nddsp" podStartSLOduration=74.785996761 podStartE2EDuration="1m14.785996761s" podCreationTimestamp="2025-12-10 12:57:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 12:58:22.768695801 +0000 UTC m=+99.984917735" watchObservedRunningTime="2025-12-10 12:58:22.785996761 +0000 UTC m=+100.002218685"
Dec 10 12:58:22 crc kubenswrapper[4921]: I1210 12:58:22.812057 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 10 12:58:22 crc kubenswrapper[4921]: E1210 12:58:22.814719 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 12:58:23.314690449 +0000 UTC m=+100.530912373 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:58:22 crc kubenswrapper[4921]: I1210 12:58:22.918716 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-556st\" (UID: \"ae0b7e23-ca18-4adc-aa3a-551c273d45af\") " pod="openshift-image-registry/image-registry-697d97f7c8-556st" Dec 10 12:58:22 crc kubenswrapper[4921]: E1210 12:58:22.919159 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 12:58:23.419140992 +0000 UTC m=+100.635362916 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-556st" (UID: "ae0b7e23-ca18-4adc-aa3a-551c273d45af") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:58:22 crc kubenswrapper[4921]: I1210 12:58:22.955784 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-g7bns" podStartSLOduration=75.955758429 podStartE2EDuration="1m15.955758429s" podCreationTimestamp="2025-12-10 12:57:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 12:58:22.859020031 +0000 UTC m=+100.075241965" watchObservedRunningTime="2025-12-10 12:58:22.955758429 +0000 UTC m=+100.171980353" Dec 10 12:58:23 crc kubenswrapper[4921]: I1210 12:58:23.022592 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 12:58:23 crc kubenswrapper[4921]: E1210 12:58:23.022934 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 12:58:23.522890845 +0000 UTC m=+100.739112769 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:58:23 crc kubenswrapper[4921]: I1210 12:58:23.022986 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-556st\" (UID: \"ae0b7e23-ca18-4adc-aa3a-551c273d45af\") " pod="openshift-image-registry/image-registry-697d97f7c8-556st" Dec 10 12:58:23 crc kubenswrapper[4921]: E1210 12:58:23.023517 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 12:58:23.523503502 +0000 UTC m=+100.739725426 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-556st" (UID: "ae0b7e23-ca18-4adc-aa3a-551c273d45af") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:58:23 crc kubenswrapper[4921]: I1210 12:58:23.042345 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-bbgxs" podStartSLOduration=76.042321835 podStartE2EDuration="1m16.042321835s" podCreationTimestamp="2025-12-10 12:57:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 12:58:23.035372682 +0000 UTC m=+100.251594606" watchObservedRunningTime="2025-12-10 12:58:23.042321835 +0000 UTC m=+100.258543759" Dec 10 12:58:23 crc kubenswrapper[4921]: I1210 12:58:23.126022 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 12:58:23 crc kubenswrapper[4921]: E1210 12:58:23.126370 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 12:58:23.626355721 +0000 UTC m=+100.842577645 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:58:23 crc kubenswrapper[4921]: I1210 12:58:23.189957 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-78cdm" podStartSLOduration=9.189915487 podStartE2EDuration="9.189915487s" podCreationTimestamp="2025-12-10 12:58:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 12:58:23.18604757 +0000 UTC m=+100.402269504" watchObservedRunningTime="2025-12-10 12:58:23.189915487 +0000 UTC m=+100.406137411" Dec 10 12:58:23 crc kubenswrapper[4921]: I1210 12:58:23.212729 4921 patch_prober.go:28] interesting pod/console-operator-58897d9998-dbzbp container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.16:8443/readyz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 10 12:58:23 crc kubenswrapper[4921]: I1210 12:58:23.212803 4921 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-dbzbp" podUID="61feb33b-8fe3-4ab5-b3a2-df7db4172225" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.16:8443/readyz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Dec 10 12:58:23 crc kubenswrapper[4921]: I1210 12:58:23.227449 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-556st\" (UID: \"ae0b7e23-ca18-4adc-aa3a-551c273d45af\") " pod="openshift-image-registry/image-registry-697d97f7c8-556st" Dec 10 12:58:23 crc kubenswrapper[4921]: E1210 12:58:23.227973 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 12:58:23.727953664 +0000 UTC m=+100.944175588 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-556st" (UID: "ae0b7e23-ca18-4adc-aa3a-551c273d45af") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:58:23 crc kubenswrapper[4921]: I1210 12:58:23.333831 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 12:58:23 crc kubenswrapper[4921]: E1210 12:58:23.334690 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 12:58:23.83466641 +0000 UTC m=+101.050888324 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:58:23 crc kubenswrapper[4921]: I1210 12:58:23.423701 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-dkwtw" podStartSLOduration=76.423675474 podStartE2EDuration="1m16.423675474s" podCreationTimestamp="2025-12-10 12:57:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 12:58:23.298837144 +0000 UTC m=+100.515059068" watchObservedRunningTime="2025-12-10 12:58:23.423675474 +0000 UTC m=+100.639897398" Dec 10 12:58:23 crc kubenswrapper[4921]: I1210 12:58:23.438597 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-556st\" (UID: \"ae0b7e23-ca18-4adc-aa3a-551c273d45af\") " pod="openshift-image-registry/image-registry-697d97f7c8-556st" Dec 10 12:58:23 crc kubenswrapper[4921]: E1210 12:58:23.439105 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 12:58:23.939086662 +0000 UTC m=+101.155308586 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-556st" (UID: "ae0b7e23-ca18-4adc-aa3a-551c273d45af") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:58:23 crc kubenswrapper[4921]: I1210 12:58:23.468166 4921 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-g7bns container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.28:6443/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 10 12:58:23 crc kubenswrapper[4921]: I1210 12:58:23.468317 4921 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-g7bns" podUID="00792261-f23c-4fc8-a67b-4b7753b692a1" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.28:6443/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 10 12:58:23 crc kubenswrapper[4921]: I1210 12:58:23.541026 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 12:58:23 crc kubenswrapper[4921]: E1210 12:58:23.541603 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 12:58:24.041586301 +0000 UTC m=+101.257808225 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:58:23 crc kubenswrapper[4921]: I1210 12:58:23.593037 4921 patch_prober.go:28] interesting pod/router-default-5444994796-xtvnd container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 10 12:58:23 crc kubenswrapper[4921]: [-]has-synced failed: reason withheld Dec 10 12:58:23 crc kubenswrapper[4921]: [+]process-running ok Dec 10 12:58:23 crc kubenswrapper[4921]: healthz check failed Dec 10 12:58:23 crc kubenswrapper[4921]: I1210 12:58:23.593103 4921 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-xtvnd" podUID="11c4ff79-c760-4e5d-8594-8dd82990dec0" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 10 12:58:23 crc kubenswrapper[4921]: I1210 12:58:23.647054 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-556st\" (UID: \"ae0b7e23-ca18-4adc-aa3a-551c273d45af\") " pod="openshift-image-registry/image-registry-697d97f7c8-556st" Dec 10 12:58:23 crc kubenswrapper[4921]: E1210 12:58:23.647382 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 12:58:24.147371001 +0000 UTC m=+101.363592925 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-556st" (UID: "ae0b7e23-ca18-4adc-aa3a-551c273d45af") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:58:23 crc kubenswrapper[4921]: I1210 12:58:23.678795 4921 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-dkwtw container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.18:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 10 12:58:23 crc kubenswrapper[4921]: I1210 12:58:23.679278 4921 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-dkwtw" podUID="6e462a60-2c72-4fe1-a9a1-e9f2a5cf185d" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.18:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Dec 10 12:58:23 crc kubenswrapper[4921]: I1210 12:58:23.748904 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 12:58:23 crc kubenswrapper[4921]: E1210 12:58:23.749280 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 12:58:24.249266913 +0000 UTC m=+101.465488837 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:58:23 crc kubenswrapper[4921]: I1210 12:58:23.757439 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-rgbkv" event={"ID":"a509d814-6573-4bfd-92c7-8d15a2d47228","Type":"ContainerStarted","Data":"ce15def43e58d12dc5ad401aaa4019f3d37c8160805b33715b4fceb66e674953"} Dec 10 12:58:23 crc kubenswrapper[4921]: I1210 12:58:23.757598 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-rgbkv" event={"ID":"a509d814-6573-4bfd-92c7-8d15a2d47228","Type":"ContainerStarted","Data":"4adb117f4d218ce24f05093eb3bc5a86ee54a837246523d643886d3b979f0661"} Dec 10 12:58:23 crc kubenswrapper[4921]: I1210 12:58:23.795821 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-sjkkc" event={"ID":"9a307988-ced0-4f7b-b6d2-e981c88eabd7","Type":"ContainerStarted","Data":"f5982f8d8c8b1f9abea45b5d1fb2854346cbe369fce07fd100c6bc0b9d7a8bcf"} Dec 10 12:58:23 crc kubenswrapper[4921]: I1210 12:58:23.795888 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-sjkkc" event={"ID":"9a307988-ced0-4f7b-b6d2-e981c88eabd7","Type":"ContainerStarted","Data":"f7425289a2a88af4af7a5edb4da98938743d4aa3a5ef60130e0fb526c3b1ca62"} Dec 10 12:58:23 crc kubenswrapper[4921]: I1210 12:58:23.815523 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-bbgxs" event={"ID":"49b1e180-71ea-45cf-bc4f-4b2283aa366e","Type":"ContainerStarted","Data":"9525f05b10a520698ed26129bb517a9364ac4620f24f9c4b6a71296178373ec4"} Dec 10 12:58:23 crc kubenswrapper[4921]: I1210 12:58:23.846284 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-kwm5b" event={"ID":"abc0551e-9e8b-4466-9eea-1da60ead65bb","Type":"ContainerStarted","Data":"808415291ed907e3b649915554532709775e03701000016e1a3e6a3e0e0d09b7"} Dec 10 12:58:23 crc kubenswrapper[4921]: I1210 12:58:23.846760 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-kwm5b" Dec 10 12:58:23 crc kubenswrapper[4921]: I1210 12:58:23.853000 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-556st\" (UID: \"ae0b7e23-ca18-4adc-aa3a-551c273d45af\") " pod="openshift-image-registry/image-registry-697d97f7c8-556st" Dec 10 12:58:23 crc kubenswrapper[4921]: E1210 12:58:23.854358 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2025-12-10 12:58:24.354346073 +0000 UTC m=+101.570567997 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-556st" (UID: "ae0b7e23-ca18-4adc-aa3a-551c273d45af") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:58:23 crc kubenswrapper[4921]: I1210 12:58:23.881953 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-kxlr2" event={"ID":"ea8b1b90-5a2a-4f0c-8fcb-c961b1ce0aa4","Type":"ContainerStarted","Data":"77842ebaa30ec92240362512908a6111b6d78a4f0c8e8c188c7edcd51ab6eeff"} Dec 10 12:58:23 crc kubenswrapper[4921]: I1210 12:58:23.882033 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-kxlr2" event={"ID":"ea8b1b90-5a2a-4f0c-8fcb-c961b1ce0aa4","Type":"ContainerStarted","Data":"4b3b6aadaf7d18bc3b997dcf75acba92e789d438074dd0cf7c78e2f8a20b1adb"} Dec 10 12:58:23 crc kubenswrapper[4921]: I1210 12:58:23.916901 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-qq2ff" event={"ID":"617cf016-593a-4d56-b104-e450cd6368ee","Type":"ContainerStarted","Data":"73b55109aea354a5d912ac7a6dbbe4a388d7bfe5877377ae62689c4e5c44f29a"} Dec 10 12:58:23 crc kubenswrapper[4921]: I1210 12:58:23.916969 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-qq2ff" event={"ID":"617cf016-593a-4d56-b104-e450cd6368ee","Type":"ContainerStarted","Data":"973d83e25970362a228de9ab438931ff13e6d67d3e52251a1ebe3965a4f93791"} Dec 10 12:58:23 crc kubenswrapper[4921]: I1210 12:58:23.955328 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 12:58:23 crc kubenswrapper[4921]: E1210 12:58:23.958622 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 12:58:24.45859137 +0000 UTC m=+101.674813294 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:58:23 crc kubenswrapper[4921]: I1210 12:58:23.958899 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-nvgsl" event={"ID":"451a15e9-0c7d-4999-b37d-d255aa272b3a","Type":"ContainerStarted","Data":"a6f5642d8a551351e2c02160bfd94e5345f640a4fe8e06c304cea05b6510b7be"} Dec 10 12:58:23 crc kubenswrapper[4921]: I1210 12:58:23.972362 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-7hhsc" event={"ID":"cb397e19-ed97-4db5-a9a0-a8da0d3a55a5","Type":"ContainerStarted","Data":"6ce59c444b222fdbf4a52b2b43ca969ba0e40b164f1a96ff1abfa4bd47f8825b"} Dec 10 12:58:23 crc kubenswrapper[4921]: I1210 12:58:23.988468 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-zkglp" event={"ID":"691beb9c-4f21-4efc-b00d-731786bc427d","Type":"ContainerStarted","Data":"7d760d81d58641acec0fccaf29efbf58d06cf4944502e7ecf692e46c8f9043d5"} Dec 10 12:58:23 crc kubenswrapper[4921]: I1210 12:58:23.996322 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-kz2qx" event={"ID":"83010039-4d8d-46c5-9069-585653c86378","Type":"ContainerStarted","Data":"58e07bedcc4baeb625faf52e73b9196d212b059e923a32eaec8acf5b1976908b"} Dec 10 12:58:24 crc kubenswrapper[4921]: I1210 12:58:24.015912 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-5lrds" event={"ID":"34937a53-6292-4363-ad11-9d5e180b13b5","Type":"ContainerStarted","Data":"f97e548419c7324d2cbba5c7ef93375ce6be480bc8a93df364633cef374f7e1a"} Dec 10 12:58:24 crc kubenswrapper[4921]: I1210 12:58:24.016188 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-5lrds" event={"ID":"34937a53-6292-4363-ad11-9d5e180b13b5","Type":"ContainerStarted","Data":"96ad11da776a84663179c2766d1b9ea31b4ad0cf641ccf1b7e060cf84d9e09b1"} Dec 10 12:58:24 crc kubenswrapper[4921]: I1210 12:58:24.026142 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-sh5l7" event={"ID":"bd4b30ef-c583-4142-874c-806c9038a954","Type":"ContainerStarted","Data":"37347a1bbfb0a6634d8a98b3d81558c9d6c5884a5c7c8019e1372f45519b3737"} Dec 10 12:58:24 crc kubenswrapper[4921]: I1210 12:58:24.026518 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-sh5l7" event={"ID":"bd4b30ef-c583-4142-874c-806c9038a954","Type":"ContainerStarted","Data":"c0733e51e18d8536ed372e877a11846dc3a1f6ba6f97c251b700932c9eb15e23"} Dec 10 12:58:24 crc kubenswrapper[4921]: I1210 12:58:24.026714 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-sh5l7" Dec 10 12:58:24 crc kubenswrapper[4921]: I1210 12:58:24.038280 4921 generic.go:334] "Generic (PLEG): container finished" podID="a24ccf53-2780-49dd-9f26-d80a42631230" containerID="92d851a1a3ffafeb6b3de4b62050b9ed94fbf4fa954577f41577956db054c3fe" 
exitCode=0 Dec 10 12:58:24 crc kubenswrapper[4921]: I1210 12:58:24.043619 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-dhxg9" event={"ID":"a24ccf53-2780-49dd-9f26-d80a42631230","Type":"ContainerDied","Data":"92d851a1a3ffafeb6b3de4b62050b9ed94fbf4fa954577f41577956db054c3fe"} Dec 10 12:58:24 crc kubenswrapper[4921]: I1210 12:58:24.047997 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-d7btl"] Dec 10 12:58:24 crc kubenswrapper[4921]: I1210 12:58:24.063504 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-556st\" (UID: \"ae0b7e23-ca18-4adc-aa3a-551c273d45af\") " pod="openshift-image-registry/image-registry-697d97f7c8-556st" Dec 10 12:58:24 crc kubenswrapper[4921]: E1210 12:58:24.066718 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 12:58:24.566688774 +0000 UTC m=+101.782910698 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-556st" (UID: "ae0b7e23-ca18-4adc-aa3a-551c273d45af") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:58:24 crc kubenswrapper[4921]: I1210 12:58:24.127897 4921 patch_prober.go:28] interesting pod/downloads-7954f5f757-7smdt container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.32:8080/\": dial tcp 10.217.0.32:8080: connect: connection refused" start-of-body= Dec 10 12:58:24 crc kubenswrapper[4921]: I1210 12:58:24.128279 4921 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-7smdt" podUID="23dc36ea-02d4-493e-863b-56126c624ab0" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.32:8080/\": dial tcp 10.217.0.32:8080: connect: connection refused" Dec 10 12:58:24 crc kubenswrapper[4921]: I1210 12:58:24.142477 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-gwnkp" Dec 10 12:58:24 crc kubenswrapper[4921]: I1210 12:58:24.177209 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-d7btl"] Dec 10 12:58:24 crc kubenswrapper[4921]: I1210 12:58:24.177281 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-dbzbp" Dec 10 12:58:24 crc kubenswrapper[4921]: I1210 12:58:24.177308 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-dkwtw" Dec 10 12:58:24 crc kubenswrapper[4921]: I1210 12:58:24.177332 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-mnz9t" Dec 10 12:58:24 crc kubenswrapper[4921]: I1210 12:58:24.143579 
4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-d7btl" Dec 10 12:58:24 crc kubenswrapper[4921]: I1210 12:58:24.177068 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 12:58:24 crc kubenswrapper[4921]: E1210 12:58:24.177165 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 12:58:24.677141364 +0000 UTC m=+101.893363288 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:58:24 crc kubenswrapper[4921]: I1210 12:58:24.179340 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-556st\" (UID: \"ae0b7e23-ca18-4adc-aa3a-551c273d45af\") " pod="openshift-image-registry/image-registry-697d97f7c8-556st" Dec 10 12:58:24 crc kubenswrapper[4921]: I1210 12:58:24.189073 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 10 12:58:24 crc kubenswrapper[4921]: I1210 12:58:24.195447 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-g7bns" Dec 10 12:58:24 crc kubenswrapper[4921]: E1210 12:58:24.208629 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 12:58:24.708605579 +0000 UTC m=+101.924827503 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-556st" (UID: "ae0b7e23-ca18-4adc-aa3a-551c273d45af") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:58:24 crc kubenswrapper[4921]: I1210 12:58:24.256470 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-gq8pd"] Dec 10 12:58:24 crc kubenswrapper[4921]: I1210 12:58:24.272935 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-gq8pd" Dec 10 12:58:24 crc kubenswrapper[4921]: I1210 12:58:24.278189 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 10 12:58:24 crc kubenswrapper[4921]: I1210 12:58:24.291273 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 12:58:24 crc kubenswrapper[4921]: I1210 12:58:24.292048 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w2m2v\" (UniqueName: \"kubernetes.io/projected/dc9190a3-c02a-48f7-ab9f-8be8951f3f37-kube-api-access-w2m2v\") pod \"community-operators-d7btl\" (UID: \"dc9190a3-c02a-48f7-ab9f-8be8951f3f37\") " pod="openshift-marketplace/community-operators-d7btl" Dec 10 12:58:24 crc kubenswrapper[4921]: I1210 12:58:24.292117 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dc9190a3-c02a-48f7-ab9f-8be8951f3f37-utilities\") pod \"community-operators-d7btl\" (UID: \"dc9190a3-c02a-48f7-ab9f-8be8951f3f37\") " pod="openshift-marketplace/community-operators-d7btl" Dec 10 12:58:24 crc kubenswrapper[4921]: I1210 12:58:24.292291 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dc9190a3-c02a-48f7-ab9f-8be8951f3f37-catalog-content\") pod \"community-operators-d7btl\" (UID: \"dc9190a3-c02a-48f7-ab9f-8be8951f3f37\") " pod="openshift-marketplace/community-operators-d7btl" Dec 10 12:58:24 crc kubenswrapper[4921]: E1210 12:58:24.293329 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 12:58:24.793303142 +0000 UTC m=+102.009525066 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:58:24 crc kubenswrapper[4921]: I1210 12:58:24.293528 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-gq8pd"] Dec 10 12:58:24 crc kubenswrapper[4921]: I1210 12:58:24.396290 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dc9190a3-c02a-48f7-ab9f-8be8951f3f37-catalog-content\") pod \"community-operators-d7btl\" (UID: \"dc9190a3-c02a-48f7-ab9f-8be8951f3f37\") " pod="openshift-marketplace/community-operators-d7btl" Dec 10 12:58:24 crc kubenswrapper[4921]: I1210 12:58:24.397139 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dc9190a3-c02a-48f7-ab9f-8be8951f3f37-catalog-content\") pod \"community-operators-d7btl\" (UID: \"dc9190a3-c02a-48f7-ab9f-8be8951f3f37\") " pod="openshift-marketplace/community-operators-d7btl" Dec 10 12:58:24 crc kubenswrapper[4921]: I1210 12:58:24.397291 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-556st\" (UID: \"ae0b7e23-ca18-4adc-aa3a-551c273d45af\") " pod="openshift-image-registry/image-registry-697d97f7c8-556st" Dec 10 12:58:24 crc kubenswrapper[4921]: E1210 12:58:24.397651 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 12:58:24.897636522 +0000 UTC m=+102.113858446 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-556st" (UID: "ae0b7e23-ca18-4adc-aa3a-551c273d45af") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:58:24 crc kubenswrapper[4921]: I1210 12:58:24.397864 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3188aa17-7df1-4bc3-a929-7e5888cb32c8-catalog-content\") pod \"certified-operators-gq8pd\" (UID: \"3188aa17-7df1-4bc3-a929-7e5888cb32c8\") " pod="openshift-marketplace/certified-operators-gq8pd" Dec 10 12:58:24 crc kubenswrapper[4921]: I1210 12:58:24.397900 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p56tc\" (UniqueName: \"kubernetes.io/projected/3188aa17-7df1-4bc3-a929-7e5888cb32c8-kube-api-access-p56tc\") pod \"certified-operators-gq8pd\" (UID: \"3188aa17-7df1-4bc3-a929-7e5888cb32c8\") " pod="openshift-marketplace/certified-operators-gq8pd" Dec 10 12:58:24 crc kubenswrapper[4921]: I1210 12:58:24.397968 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w2m2v\" (UniqueName: \"kubernetes.io/projected/dc9190a3-c02a-48f7-ab9f-8be8951f3f37-kube-api-access-w2m2v\") pod \"community-operators-d7btl\" (UID: \"dc9190a3-c02a-48f7-ab9f-8be8951f3f37\") " pod="openshift-marketplace/community-operators-d7btl" Dec 10 12:58:24 crc kubenswrapper[4921]: I1210 12:58:24.397989 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dc9190a3-c02a-48f7-ab9f-8be8951f3f37-utilities\") pod \"community-operators-d7btl\" (UID: \"dc9190a3-c02a-48f7-ab9f-8be8951f3f37\") " pod="openshift-marketplace/community-operators-d7btl" Dec 10 12:58:24 crc kubenswrapper[4921]: I1210 12:58:24.398010 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3188aa17-7df1-4bc3-a929-7e5888cb32c8-utilities\") pod \"certified-operators-gq8pd\" (UID: \"3188aa17-7df1-4bc3-a929-7e5888cb32c8\") " pod="openshift-marketplace/certified-operators-gq8pd" Dec 10 12:58:24 crc kubenswrapper[4921]: I1210 12:58:24.398483 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dc9190a3-c02a-48f7-ab9f-8be8951f3f37-utilities\") pod \"community-operators-d7btl\" (UID: \"dc9190a3-c02a-48f7-ab9f-8be8951f3f37\") " pod="openshift-marketplace/community-operators-d7btl" Dec 10 12:58:24 crc kubenswrapper[4921]: I1210 12:58:24.421157 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-rgbkv" podStartSLOduration=77.421141175 podStartE2EDuration="1m17.421141175s" podCreationTimestamp="2025-12-10 12:57:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 12:58:24.420649982 +0000 UTC m=+101.636871916" watchObservedRunningTime="2025-12-10 12:58:24.421141175 +0000 UTC m=+101.637363089" Dec 10 12:58:24 crc kubenswrapper[4921]: I1210 12:58:24.452416 4921 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w2m2v\" (UniqueName: \"kubernetes.io/projected/dc9190a3-c02a-48f7-ab9f-8be8951f3f37-kube-api-access-w2m2v\") pod \"community-operators-d7btl\" (UID: \"dc9190a3-c02a-48f7-ab9f-8be8951f3f37\") " pod="openshift-marketplace/community-operators-d7btl" Dec 10 12:58:24 crc kubenswrapper[4921]: I1210 12:58:24.498917 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 12:58:24 crc kubenswrapper[4921]: E1210 12:58:24.499019 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 12:58:24.999002589 +0000 UTC m=+102.215224513 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:58:24 crc kubenswrapper[4921]: I1210 12:58:24.499194 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3188aa17-7df1-4bc3-a929-7e5888cb32c8-utilities\") pod \"certified-operators-gq8pd\" (UID: \"3188aa17-7df1-4bc3-a929-7e5888cb32c8\") " pod="openshift-marketplace/certified-operators-gq8pd" Dec 10 12:58:24 crc kubenswrapper[4921]: I1210 12:58:24.499593 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3188aa17-7df1-4bc3-a929-7e5888cb32c8-utilities\") pod \"certified-operators-gq8pd\" (UID: \"3188aa17-7df1-4bc3-a929-7e5888cb32c8\") " pod="openshift-marketplace/certified-operators-gq8pd" Dec 10 12:58:24 crc kubenswrapper[4921]: I1210 12:58:24.499654 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-556st\" (UID: \"ae0b7e23-ca18-4adc-aa3a-551c273d45af\") " pod="openshift-image-registry/image-registry-697d97f7c8-556st" Dec 10 12:58:24 crc kubenswrapper[4921]: E1210 12:58:24.499945 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 12:58:24.999937065 +0000 UTC m=+102.216158989 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-556st" (UID: "ae0b7e23-ca18-4adc-aa3a-551c273d45af") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:58:24 crc kubenswrapper[4921]: I1210 12:58:24.499986 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3188aa17-7df1-4bc3-a929-7e5888cb32c8-catalog-content\") pod \"certified-operators-gq8pd\" (UID: \"3188aa17-7df1-4bc3-a929-7e5888cb32c8\") " pod="openshift-marketplace/certified-operators-gq8pd" Dec 10 12:58:24 crc kubenswrapper[4921]: I1210 12:58:24.500008 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p56tc\" (UniqueName: \"kubernetes.io/projected/3188aa17-7df1-4bc3-a929-7e5888cb32c8-kube-api-access-p56tc\") pod \"certified-operators-gq8pd\" (UID: \"3188aa17-7df1-4bc3-a929-7e5888cb32c8\") " pod="openshift-marketplace/certified-operators-gq8pd" Dec 10 12:58:24 crc kubenswrapper[4921]: I1210 12:58:24.500404 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3188aa17-7df1-4bc3-a929-7e5888cb32c8-catalog-content\") pod \"certified-operators-gq8pd\" (UID: \"3188aa17-7df1-4bc3-a929-7e5888cb32c8\") " pod="openshift-marketplace/certified-operators-gq8pd" Dec 10 12:58:24 crc kubenswrapper[4921]: I1210 12:58:24.519855 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-prv2t"] Dec 10 12:58:24 crc kubenswrapper[4921]: I1210 12:58:24.520808 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-prv2t" Dec 10 12:58:24 crc kubenswrapper[4921]: I1210 12:58:24.556638 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-d7btl" Dec 10 12:58:24 crc kubenswrapper[4921]: I1210 12:58:24.561126 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-prv2t"] Dec 10 12:58:24 crc kubenswrapper[4921]: I1210 12:58:24.567124 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p56tc\" (UniqueName: \"kubernetes.io/projected/3188aa17-7df1-4bc3-a929-7e5888cb32c8-kube-api-access-p56tc\") pod \"certified-operators-gq8pd\" (UID: \"3188aa17-7df1-4bc3-a929-7e5888cb32c8\") " pod="openshift-marketplace/certified-operators-gq8pd" Dec 10 12:58:24 crc kubenswrapper[4921]: I1210 12:58:24.598190 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-gq8pd" Dec 10 12:58:24 crc kubenswrapper[4921]: I1210 12:58:24.602320 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 12:58:24 crc kubenswrapper[4921]: I1210 12:58:24.602812 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/013be189-8bbe-40ef-af47-51c8fd79aa1a-utilities\") pod \"community-operators-prv2t\" (UID: \"013be189-8bbe-40ef-af47-51c8fd79aa1a\") " pod="openshift-marketplace/community-operators-prv2t" Dec 10 12:58:24 crc kubenswrapper[4921]: I1210 12:58:24.602906 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/013be189-8bbe-40ef-af47-51c8fd79aa1a-catalog-content\") pod \"community-operators-prv2t\" (UID: \"013be189-8bbe-40ef-af47-51c8fd79aa1a\") " pod="openshift-marketplace/community-operators-prv2t" Dec 10 12:58:24 crc kubenswrapper[4921]: I1210 12:58:24.603033 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m7kjl\" (UniqueName: \"kubernetes.io/projected/013be189-8bbe-40ef-af47-51c8fd79aa1a-kube-api-access-m7kjl\") pod \"community-operators-prv2t\" (UID: \"013be189-8bbe-40ef-af47-51c8fd79aa1a\") " pod="openshift-marketplace/community-operators-prv2t" Dec 10 12:58:24 crc kubenswrapper[4921]: E1210 12:58:24.603241 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 12:58:25.103225106 +0000 UTC m=+102.319447030 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:58:24 crc kubenswrapper[4921]: I1210 12:58:24.605599 4921 patch_prober.go:28] interesting pod/router-default-5444994796-xtvnd container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 10 12:58:24 crc kubenswrapper[4921]: [-]has-synced failed: reason withheld Dec 10 12:58:24 crc kubenswrapper[4921]: [+]process-running ok Dec 10 12:58:24 crc kubenswrapper[4921]: healthz check failed Dec 10 12:58:24 crc kubenswrapper[4921]: I1210 12:58:24.605650 4921 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-xtvnd" podUID="11c4ff79-c760-4e5d-8594-8dd82990dec0" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 10 12:58:24 crc kubenswrapper[4921]: I1210 12:58:24.657773 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-wblqq"] Dec 10 12:58:24 crc kubenswrapper[4921]: I1210 12:58:24.660803 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-wblqq" Dec 10 12:58:24 crc kubenswrapper[4921]: I1210 12:58:24.718968 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/013be189-8bbe-40ef-af47-51c8fd79aa1a-utilities\") pod \"community-operators-prv2t\" (UID: \"013be189-8bbe-40ef-af47-51c8fd79aa1a\") " pod="openshift-marketplace/community-operators-prv2t" Dec 10 12:58:24 crc kubenswrapper[4921]: I1210 12:58:24.719480 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/013be189-8bbe-40ef-af47-51c8fd79aa1a-catalog-content\") pod \"community-operators-prv2t\" (UID: \"013be189-8bbe-40ef-af47-51c8fd79aa1a\") " pod="openshift-marketplace/community-operators-prv2t" Dec 10 12:58:24 crc kubenswrapper[4921]: I1210 12:58:24.719628 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m7kjl\" (UniqueName: \"kubernetes.io/projected/013be189-8bbe-40ef-af47-51c8fd79aa1a-kube-api-access-m7kjl\") pod \"community-operators-prv2t\" (UID: \"013be189-8bbe-40ef-af47-51c8fd79aa1a\") " pod="openshift-marketplace/community-operators-prv2t" Dec 10 12:58:24 crc kubenswrapper[4921]: I1210 12:58:24.719729 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-556st\" (UID: \"ae0b7e23-ca18-4adc-aa3a-551c273d45af\") " pod="openshift-image-registry/image-registry-697d97f7c8-556st" Dec 10 12:58:24 crc kubenswrapper[4921]: I1210 12:58:24.729130 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-wblqq"] Dec 10 12:58:24 crc kubenswrapper[4921]: I1210 12:58:24.729972 4921 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/013be189-8bbe-40ef-af47-51c8fd79aa1a-utilities\") pod \"community-operators-prv2t\" (UID: \"013be189-8bbe-40ef-af47-51c8fd79aa1a\") " pod="openshift-marketplace/community-operators-prv2t" Dec 10 12:58:24 crc kubenswrapper[4921]: I1210 12:58:24.730329 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/013be189-8bbe-40ef-af47-51c8fd79aa1a-catalog-content\") pod \"community-operators-prv2t\" (UID: \"013be189-8bbe-40ef-af47-51c8fd79aa1a\") " pod="openshift-marketplace/community-operators-prv2t" Dec 10 12:58:24 crc kubenswrapper[4921]: E1210 12:58:24.741459 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 12:58:25.241437537 +0000 UTC m=+102.457659461 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-556st" (UID: "ae0b7e23-ca18-4adc-aa3a-551c273d45af") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:58:24 crc kubenswrapper[4921]: I1210 12:58:24.822712 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 12:58:24 crc kubenswrapper[4921]: E1210 12:58:24.866679 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 12:58:25.366623506 +0000 UTC m=+102.582845430 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:58:24 crc kubenswrapper[4921]: I1210 12:58:24.875370 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-knksg\" (UniqueName: \"kubernetes.io/projected/2629ed61-02c7-450a-9d32-d3277755229f-kube-api-access-knksg\") pod \"certified-operators-wblqq\" (UID: \"2629ed61-02c7-450a-9d32-d3277755229f\") " pod="openshift-marketplace/certified-operators-wblqq" Dec 10 12:58:24 crc kubenswrapper[4921]: I1210 12:58:24.875479 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2629ed61-02c7-450a-9d32-d3277755229f-utilities\") pod \"certified-operators-wblqq\" (UID: \"2629ed61-02c7-450a-9d32-d3277755229f\") " pod="openshift-marketplace/certified-operators-wblqq" Dec 10 12:58:24 crc kubenswrapper[4921]: I1210 12:58:24.875509 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2629ed61-02c7-450a-9d32-d3277755229f-catalog-content\") pod \"certified-operators-wblqq\" (UID: \"2629ed61-02c7-450a-9d32-d3277755229f\") " pod="openshift-marketplace/certified-operators-wblqq" Dec 10 12:58:24 crc kubenswrapper[4921]: I1210 12:58:24.884491 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m7kjl\" (UniqueName: \"kubernetes.io/projected/013be189-8bbe-40ef-af47-51c8fd79aa1a-kube-api-access-m7kjl\") pod \"community-operators-prv2t\" (UID: \"013be189-8bbe-40ef-af47-51c8fd79aa1a\") " pod="openshift-marketplace/community-operators-prv2t" Dec 10 12:58:24 crc kubenswrapper[4921]: I1210 12:58:24.926981 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-kxlr2" podStartSLOduration=77.926957613 podStartE2EDuration="1m17.926957613s" podCreationTimestamp="2025-12-10 12:57:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 12:58:24.783264399 +0000 UTC m=+101.999486323" watchObservedRunningTime="2025-12-10 12:58:24.926957613 +0000 UTC m=+102.143179537" Dec 10 12:58:24 crc kubenswrapper[4921]: I1210 12:58:24.979004 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2629ed61-02c7-450a-9d32-d3277755229f-utilities\") pod \"certified-operators-wblqq\" (UID: \"2629ed61-02c7-450a-9d32-d3277755229f\") " pod="openshift-marketplace/certified-operators-wblqq" Dec 10 12:58:24 crc kubenswrapper[4921]: I1210 12:58:24.979043 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2629ed61-02c7-450a-9d32-d3277755229f-catalog-content\") pod \"certified-operators-wblqq\" (UID: \"2629ed61-02c7-450a-9d32-d3277755229f\") " pod="openshift-marketplace/certified-operators-wblqq" Dec 10 12:58:24 crc kubenswrapper[4921]: I1210 12:58:24.979112 4921 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-556st\" (UID: \"ae0b7e23-ca18-4adc-aa3a-551c273d45af\") " pod="openshift-image-registry/image-registry-697d97f7c8-556st" Dec 10 12:58:24 crc kubenswrapper[4921]: I1210 12:58:24.979161 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-knksg\" (UniqueName: \"kubernetes.io/projected/2629ed61-02c7-450a-9d32-d3277755229f-kube-api-access-knksg\") pod \"certified-operators-wblqq\" (UID: \"2629ed61-02c7-450a-9d32-d3277755229f\") " pod="openshift-marketplace/certified-operators-wblqq" Dec 10 12:58:24 crc kubenswrapper[4921]: E1210 12:58:24.979657 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 12:58:25.479646327 +0000 UTC m=+102.695868251 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-556st" (UID: "ae0b7e23-ca18-4adc-aa3a-551c273d45af") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:58:24 crc kubenswrapper[4921]: I1210 12:58:24.980170 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2629ed61-02c7-450a-9d32-d3277755229f-utilities\") pod \"certified-operators-wblqq\" (UID: \"2629ed61-02c7-450a-9d32-d3277755229f\") " pod="openshift-marketplace/certified-operators-wblqq" Dec 10 12:58:24 crc kubenswrapper[4921]: I1210 12:58:24.983058 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2629ed61-02c7-450a-9d32-d3277755229f-catalog-content\") pod \"certified-operators-wblqq\" (UID: \"2629ed61-02c7-450a-9d32-d3277755229f\") " pod="openshift-marketplace/certified-operators-wblqq" Dec 10 12:58:25 crc kubenswrapper[4921]: I1210 12:58:25.012651 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-kz2qx" podStartSLOduration=78.012634014 podStartE2EDuration="1m18.012634014s" podCreationTimestamp="2025-12-10 12:57:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 12:58:24.925851112 +0000 UTC m=+102.142073026" watchObservedRunningTime="2025-12-10 12:58:25.012634014 +0000 UTC m=+102.228855938" Dec 10 12:58:25 crc kubenswrapper[4921]: I1210 12:58:25.014641 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-sh5l7" podStartSLOduration=11.0146335 podStartE2EDuration="11.0146335s" podCreationTimestamp="2025-12-10 12:58:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 12:58:25.007982355 +0000 UTC m=+102.224204289" watchObservedRunningTime="2025-12-10 12:58:25.0146335 +0000 UTC m=+102.230855424" Dec 10 12:58:25 
crc kubenswrapper[4921]: I1210 12:58:25.044044 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29422845-kdjmt" Dec 10 12:58:25 crc kubenswrapper[4921]: I1210 12:58:25.048005 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-knksg\" (UniqueName: \"kubernetes.io/projected/2629ed61-02c7-450a-9d32-d3277755229f-kube-api-access-knksg\") pod \"certified-operators-wblqq\" (UID: \"2629ed61-02c7-450a-9d32-d3277755229f\") " pod="openshift-marketplace/certified-operators-wblqq" Dec 10 12:58:25 crc kubenswrapper[4921]: I1210 12:58:25.083200 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/959e5098-562a-471a-9396-fed74ed113b5-secret-volume\") pod \"959e5098-562a-471a-9396-fed74ed113b5\" (UID: \"959e5098-562a-471a-9396-fed74ed113b5\") " Dec 10 12:58:25 crc kubenswrapper[4921]: I1210 12:58:25.083316 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/959e5098-562a-471a-9396-fed74ed113b5-config-volume\") pod \"959e5098-562a-471a-9396-fed74ed113b5\" (UID: \"959e5098-562a-471a-9396-fed74ed113b5\") " Dec 10 12:58:25 crc kubenswrapper[4921]: I1210 12:58:25.083344 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kp5br\" (UniqueName: \"kubernetes.io/projected/959e5098-562a-471a-9396-fed74ed113b5-kube-api-access-kp5br\") pod \"959e5098-562a-471a-9396-fed74ed113b5\" (UID: \"959e5098-562a-471a-9396-fed74ed113b5\") " Dec 10 12:58:25 crc kubenswrapper[4921]: I1210 12:58:25.083469 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 12:58:25 crc kubenswrapper[4921]: E1210 12:58:25.083777 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 12:58:25.583763011 +0000 UTC m=+102.799984935 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:58:25 crc kubenswrapper[4921]: I1210 12:58:25.086000 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/959e5098-562a-471a-9396-fed74ed113b5-config-volume" (OuterVolumeSpecName: "config-volume") pod "959e5098-562a-471a-9396-fed74ed113b5" (UID: "959e5098-562a-471a-9396-fed74ed113b5"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 12:58:25 crc kubenswrapper[4921]: I1210 12:58:25.090594 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/959e5098-562a-471a-9396-fed74ed113b5-kube-api-access-kp5br" (OuterVolumeSpecName: "kube-api-access-kp5br") pod "959e5098-562a-471a-9396-fed74ed113b5" (UID: "959e5098-562a-471a-9396-fed74ed113b5"). InnerVolumeSpecName "kube-api-access-kp5br". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 12:58:25 crc kubenswrapper[4921]: I1210 12:58:25.093028 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/959e5098-562a-471a-9396-fed74ed113b5-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "959e5098-562a-471a-9396-fed74ed113b5" (UID: "959e5098-562a-471a-9396-fed74ed113b5"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 12:58:25 crc kubenswrapper[4921]: I1210 12:58:25.116766 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29422845-kdjmt" event={"ID":"959e5098-562a-471a-9396-fed74ed113b5","Type":"ContainerDied","Data":"59c448657088234167a192371822778dbeb991f6e1ab089a4dd5b2b5c2935a28"} Dec 10 12:58:25 crc kubenswrapper[4921]: I1210 12:58:25.116825 4921 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="59c448657088234167a192371822778dbeb991f6e1ab089a4dd5b2b5c2935a28" Dec 10 12:58:25 crc kubenswrapper[4921]: I1210 12:58:25.116897 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29422845-kdjmt" Dec 10 12:58:25 crc kubenswrapper[4921]: I1210 12:58:25.130035 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-m9sh2" event={"ID":"9831d515-5a42-4f3e-98a9-aca48b1093d8","Type":"ContainerStarted","Data":"6e1f65696443753ce0127d1e140df737565cd9f12a35b330602ebe448d9ae475"} Dec 10 12:58:25 crc kubenswrapper[4921]: I1210 12:58:25.141998 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-prv2t" Dec 10 12:58:25 crc kubenswrapper[4921]: I1210 12:58:25.153019 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-dhxg9" event={"ID":"a24ccf53-2780-49dd-9f26-d80a42631230","Type":"ContainerStarted","Data":"1b4260d6fb742b721ab645f6e9a26fc57404090afddad4e0b32c180a0d62b636"} Dec 10 12:58:25 crc kubenswrapper[4921]: I1210 12:58:25.153144 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-dhxg9" Dec 10 12:58:25 crc kubenswrapper[4921]: I1210 12:58:25.187542 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-556st\" (UID: \"ae0b7e23-ca18-4adc-aa3a-551c273d45af\") " pod="openshift-image-registry/image-registry-697d97f7c8-556st" Dec 10 12:58:25 crc kubenswrapper[4921]: I1210 12:58:25.187907 4921 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/959e5098-562a-471a-9396-fed74ed113b5-config-volume\") on node \"crc\" DevicePath \"\"" Dec 10 12:58:25 crc kubenswrapper[4921]: I1210 12:58:25.187920 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kp5br\" (UniqueName: \"kubernetes.io/projected/959e5098-562a-471a-9396-fed74ed113b5-kube-api-access-kp5br\") on node \"crc\" DevicePath \"\"" Dec 10 12:58:25 crc kubenswrapper[4921]: I1210 12:58:25.187931 4921 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/959e5098-562a-471a-9396-fed74ed113b5-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 10 12:58:25 crc kubenswrapper[4921]: E1210 12:58:25.198509 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 12:58:25.698489539 +0000 UTC m=+102.914711463 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-556st" (UID: "ae0b7e23-ca18-4adc-aa3a-551c273d45af") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:58:25 crc kubenswrapper[4921]: I1210 12:58:25.222557 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-kwm5b" podStartSLOduration=78.222540548 podStartE2EDuration="1m18.222540548s" podCreationTimestamp="2025-12-10 12:57:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 12:58:25.14167647 +0000 UTC m=+102.357898394" watchObservedRunningTime="2025-12-10 12:58:25.222540548 +0000 UTC m=+102.438762472" Dec 10 12:58:25 crc kubenswrapper[4921]: I1210 12:58:25.286110 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-sjkkc" podStartSLOduration=78.286091794 podStartE2EDuration="1m18.286091794s" podCreationTimestamp="2025-12-10 12:57:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 12:58:25.225031147 +0000 UTC m=+102.441253081" watchObservedRunningTime="2025-12-10 12:58:25.286091794 +0000 UTC m=+102.502313718" Dec 10 12:58:25 crc kubenswrapper[4921]: I1210 12:58:25.288655 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 12:58:25 crc kubenswrapper[4921]: E1210 12:58:25.288937 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 12:58:25.788925843 +0000 UTC m=+103.005147767 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:58:25 crc kubenswrapper[4921]: I1210 12:58:25.339661 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-wblqq" Dec 10 12:58:25 crc kubenswrapper[4921]: I1210 12:58:25.390554 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-556st\" (UID: \"ae0b7e23-ca18-4adc-aa3a-551c273d45af\") " pod="openshift-image-registry/image-registry-697d97f7c8-556st" Dec 10 12:58:25 crc kubenswrapper[4921]: E1210 12:58:25.390877 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 12:58:25.890865966 +0000 UTC m=+103.107087890 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-556st" (UID: "ae0b7e23-ca18-4adc-aa3a-551c273d45af") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:58:25 crc kubenswrapper[4921]: I1210 12:58:25.503482 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 12:58:25 crc kubenswrapper[4921]: E1210 12:58:25.504087 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 12:58:26.004071082 +0000 UTC m=+103.220292996 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:58:25 crc kubenswrapper[4921]: I1210 12:58:25.535731 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-5lrds" podStartSLOduration=78.535709601 podStartE2EDuration="1m18.535709601s" podCreationTimestamp="2025-12-10 12:57:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 12:58:25.286953058 +0000 UTC m=+102.503174982" watchObservedRunningTime="2025-12-10 12:58:25.535709601 +0000 UTC m=+102.751931535" Dec 10 12:58:25 crc kubenswrapper[4921]: I1210 12:58:25.591808 4921 patch_prober.go:28] interesting pod/router-default-5444994796-xtvnd container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 10 12:58:25 crc kubenswrapper[4921]: [-]has-synced failed: reason withheld Dec 10 12:58:25 crc kubenswrapper[4921]: [+]process-running ok Dec 10 12:58:25 crc kubenswrapper[4921]: healthz check failed Dec 10 12:58:25 crc kubenswrapper[4921]: I1210 12:58:25.591899 4921 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-xtvnd" podUID="11c4ff79-c760-4e5d-8594-8dd82990dec0" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 10 12:58:25 crc kubenswrapper[4921]: I1210 12:58:25.607615 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-556st\" (UID: \"ae0b7e23-ca18-4adc-aa3a-551c273d45af\") " pod="openshift-image-registry/image-registry-697d97f7c8-556st" Dec 10 12:58:25 crc kubenswrapper[4921]: E1210 12:58:25.608094 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 12:58:26.108077873 +0000 UTC m=+103.324299797 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-556st" (UID: "ae0b7e23-ca18-4adc-aa3a-551c273d45af") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:58:25 crc kubenswrapper[4921]: I1210 12:58:25.630751 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-qq2ff" podStartSLOduration=78.630722512 podStartE2EDuration="1m18.630722512s" podCreationTimestamp="2025-12-10 12:57:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 12:58:25.541815301 +0000 UTC m=+102.758037225" watchObservedRunningTime="2025-12-10 12:58:25.630722512 +0000 UTC m=+102.846944436" Dec 10 12:58:25 crc kubenswrapper[4921]: I1210 12:58:25.708754 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 12:58:25 crc kubenswrapper[4921]: E1210 12:58:25.709048 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 12:58:26.209032839 +0000 UTC m=+103.425254763 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:58:25 crc kubenswrapper[4921]: I1210 12:58:25.779691 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-zkglp" podStartSLOduration=78.779672572 podStartE2EDuration="1m18.779672572s" podCreationTimestamp="2025-12-10 12:57:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 12:58:25.724653083 +0000 UTC m=+102.940875017" watchObservedRunningTime="2025-12-10 12:58:25.779672572 +0000 UTC m=+102.995894496" Dec 10 12:58:25 crc kubenswrapper[4921]: I1210 12:58:25.810091 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-556st\" (UID: \"ae0b7e23-ca18-4adc-aa3a-551c273d45af\") " pod="openshift-image-registry/image-registry-697d97f7c8-556st" Dec 10 12:58:25 crc kubenswrapper[4921]: E1210 12:58:25.810618 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 12:58:26.310603581 +0000 UTC m=+103.526825515 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-556st" (UID: "ae0b7e23-ca18-4adc-aa3a-551c273d45af") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:58:25 crc kubenswrapper[4921]: I1210 12:58:25.901360 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-dhxg9" podStartSLOduration=78.901343753 podStartE2EDuration="1m18.901343753s" podCreationTimestamp="2025-12-10 12:57:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 12:58:25.89905441 +0000 UTC m=+103.115276344" watchObservedRunningTime="2025-12-10 12:58:25.901343753 +0000 UTC m=+103.117565677" Dec 10 12:58:25 crc kubenswrapper[4921]: I1210 12:58:25.911190 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 12:58:25 crc kubenswrapper[4921]: E1210 12:58:25.911588 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 12:58:26.411575397 +0000 UTC m=+103.627797321 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:58:26 crc kubenswrapper[4921]: I1210 12:58:26.016142 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-556st\" (UID: \"ae0b7e23-ca18-4adc-aa3a-551c273d45af\") " pod="openshift-image-registry/image-registry-697d97f7c8-556st" Dec 10 12:58:26 crc kubenswrapper[4921]: E1210 12:58:26.016682 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 12:58:26.516659378 +0000 UTC m=+103.732881302 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-556st" (UID: "ae0b7e23-ca18-4adc-aa3a-551c273d45af") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:58:26 crc kubenswrapper[4921]: I1210 12:58:26.019145 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-d7btl"] Dec 10 12:58:26 crc kubenswrapper[4921]: I1210 12:58:26.088564 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-tjjqr"] Dec 10 12:58:26 crc kubenswrapper[4921]: E1210 12:58:26.088845 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="959e5098-562a-471a-9396-fed74ed113b5" containerName="collect-profiles" Dec 10 12:58:26 crc kubenswrapper[4921]: I1210 12:58:26.088863 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="959e5098-562a-471a-9396-fed74ed113b5" containerName="collect-profiles" Dec 10 12:58:26 crc kubenswrapper[4921]: I1210 12:58:26.088981 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="959e5098-562a-471a-9396-fed74ed113b5" containerName="collect-profiles" Dec 10 12:58:26 crc kubenswrapper[4921]: I1210 12:58:26.089862 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tjjqr" Dec 10 12:58:26 crc kubenswrapper[4921]: I1210 12:58:26.115799 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 10 12:58:26 crc kubenswrapper[4921]: I1210 12:58:26.117615 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 12:58:26 crc kubenswrapper[4921]: I1210 12:58:26.118053 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/18c18bb4-4d43-4f8e-aa0d-808598954883-catalog-content\") pod \"redhat-marketplace-tjjqr\" (UID: \"18c18bb4-4d43-4f8e-aa0d-808598954883\") " pod="openshift-marketplace/redhat-marketplace-tjjqr" Dec 10 12:58:26 crc kubenswrapper[4921]: I1210 12:58:26.118134 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mzslv\" (UniqueName: \"kubernetes.io/projected/18c18bb4-4d43-4f8e-aa0d-808598954883-kube-api-access-mzslv\") pod \"redhat-marketplace-tjjqr\" (UID: \"18c18bb4-4d43-4f8e-aa0d-808598954883\") " pod="openshift-marketplace/redhat-marketplace-tjjqr" Dec 10 12:58:26 crc kubenswrapper[4921]: I1210 12:58:26.118166 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/18c18bb4-4d43-4f8e-aa0d-808598954883-utilities\") pod \"redhat-marketplace-tjjqr\" (UID: \"18c18bb4-4d43-4f8e-aa0d-808598954883\") " pod="openshift-marketplace/redhat-marketplace-tjjqr" Dec 10 12:58:26 crc kubenswrapper[4921]: E1210 12:58:26.118301 4921 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 12:58:26.618278042 +0000 UTC m=+103.834499966 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:58:26 crc kubenswrapper[4921]: I1210 12:58:26.130947 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-tjjqr"] Dec 10 12:58:26 crc kubenswrapper[4921]: I1210 12:58:26.197926 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-d7btl" event={"ID":"dc9190a3-c02a-48f7-ab9f-8be8951f3f37","Type":"ContainerStarted","Data":"07d8bc36a6840a7abe7f2c9c0644bd4b4d776f5e6d6759f40b2b88e8592d31c8"} Dec 10 12:58:26 crc kubenswrapper[4921]: I1210 12:58:26.220272 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-556st\" (UID: \"ae0b7e23-ca18-4adc-aa3a-551c273d45af\") " pod="openshift-image-registry/image-registry-697d97f7c8-556st" Dec 10 12:58:26 crc kubenswrapper[4921]: I1210 12:58:26.220570 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/18c18bb4-4d43-4f8e-aa0d-808598954883-catalog-content\") pod \"redhat-marketplace-tjjqr\" (UID: \"18c18bb4-4d43-4f8e-aa0d-808598954883\") " pod="openshift-marketplace/redhat-marketplace-tjjqr" Dec 10 12:58:26 crc kubenswrapper[4921]: I1210 12:58:26.220743 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mzslv\" (UniqueName: \"kubernetes.io/projected/18c18bb4-4d43-4f8e-aa0d-808598954883-kube-api-access-mzslv\") pod \"redhat-marketplace-tjjqr\" (UID: \"18c18bb4-4d43-4f8e-aa0d-808598954883\") " pod="openshift-marketplace/redhat-marketplace-tjjqr" Dec 10 12:58:26 crc kubenswrapper[4921]: I1210 12:58:26.221993 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/18c18bb4-4d43-4f8e-aa0d-808598954883-utilities\") pod \"redhat-marketplace-tjjqr\" (UID: \"18c18bb4-4d43-4f8e-aa0d-808598954883\") " pod="openshift-marketplace/redhat-marketplace-tjjqr" Dec 10 12:58:26 crc kubenswrapper[4921]: E1210 12:58:26.222930 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 12:58:26.72291704 +0000 UTC m=+103.939138964 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-556st" (UID: "ae0b7e23-ca18-4adc-aa3a-551c273d45af") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:58:26 crc kubenswrapper[4921]: I1210 12:58:26.261910 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/18c18bb4-4d43-4f8e-aa0d-808598954883-catalog-content\") pod \"redhat-marketplace-tjjqr\" (UID: \"18c18bb4-4d43-4f8e-aa0d-808598954883\") " pod="openshift-marketplace/redhat-marketplace-tjjqr" Dec 10 12:58:26 crc kubenswrapper[4921]: I1210 12:58:26.275783 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/18c18bb4-4d43-4f8e-aa0d-808598954883-utilities\") pod \"redhat-marketplace-tjjqr\" (UID: \"18c18bb4-4d43-4f8e-aa0d-808598954883\") " pod="openshift-marketplace/redhat-marketplace-tjjqr" Dec 10 12:58:26 crc kubenswrapper[4921]: I1210 12:58:26.288921 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mzslv\" (UniqueName: \"kubernetes.io/projected/18c18bb4-4d43-4f8e-aa0d-808598954883-kube-api-access-mzslv\") pod \"redhat-marketplace-tjjqr\" (UID: \"18c18bb4-4d43-4f8e-aa0d-808598954883\") " pod="openshift-marketplace/redhat-marketplace-tjjqr" Dec 10 12:58:26 crc kubenswrapper[4921]: I1210 12:58:26.325228 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 12:58:26 crc kubenswrapper[4921]: I1210 12:58:26.325510 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9cc656f0-ce36-474b-9fa3-1ce9f43675a4-metrics-certs\") pod \"network-metrics-daemon-j2nnf\" (UID: \"9cc656f0-ce36-474b-9fa3-1ce9f43675a4\") " pod="openshift-multus/network-metrics-daemon-j2nnf" Dec 10 12:58:26 crc kubenswrapper[4921]: E1210 12:58:26.326565 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 12:58:26.826531239 +0000 UTC m=+104.042753163 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:58:26 crc kubenswrapper[4921]: I1210 12:58:26.331584 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9cc656f0-ce36-474b-9fa3-1ce9f43675a4-metrics-certs\") pod \"network-metrics-daemon-j2nnf\" (UID: \"9cc656f0-ce36-474b-9fa3-1ce9f43675a4\") " pod="openshift-multus/network-metrics-daemon-j2nnf" Dec 10 12:58:26 crc kubenswrapper[4921]: I1210 12:58:26.428884 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-556st\" (UID: \"ae0b7e23-ca18-4adc-aa3a-551c273d45af\") " pod="openshift-image-registry/image-registry-697d97f7c8-556st" Dec 10 12:58:26 crc kubenswrapper[4921]: E1210 12:58:26.429327 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 12:58:26.929305695 +0000 UTC m=+104.145527619 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-556st" (UID: "ae0b7e23-ca18-4adc-aa3a-551c273d45af") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:58:26 crc kubenswrapper[4921]: I1210 12:58:26.470887 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-54ksn"] Dec 10 12:58:26 crc kubenswrapper[4921]: I1210 12:58:26.472949 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-54ksn" Dec 10 12:58:26 crc kubenswrapper[4921]: I1210 12:58:26.473996 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tjjqr" Dec 10 12:58:26 crc kubenswrapper[4921]: I1210 12:58:26.479716 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-qq2ff" Dec 10 12:58:26 crc kubenswrapper[4921]: I1210 12:58:26.481814 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-qq2ff" Dec 10 12:58:26 crc kubenswrapper[4921]: I1210 12:58:26.504100 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-54ksn"] Dec 10 12:58:26 crc kubenswrapper[4921]: I1210 12:58:26.537472 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 12:58:26 crc kubenswrapper[4921]: I1210 12:58:26.537748 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/13ae054d-506c-4469-baaa-c8cf6347b2f1-utilities\") pod \"redhat-marketplace-54ksn\" (UID: \"13ae054d-506c-4469-baaa-c8cf6347b2f1\") " pod="openshift-marketplace/redhat-marketplace-54ksn" Dec 10 12:58:26 crc kubenswrapper[4921]: I1210 12:58:26.537846 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4r7pt\" (UniqueName: \"kubernetes.io/projected/13ae054d-506c-4469-baaa-c8cf6347b2f1-kube-api-access-4r7pt\") pod \"redhat-marketplace-54ksn\" (UID: \"13ae054d-506c-4469-baaa-c8cf6347b2f1\") " pod="openshift-marketplace/redhat-marketplace-54ksn" Dec 10 12:58:26 crc kubenswrapper[4921]: I1210 12:58:26.537885 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/13ae054d-506c-4469-baaa-c8cf6347b2f1-catalog-content\") pod \"redhat-marketplace-54ksn\" (UID: \"13ae054d-506c-4469-baaa-c8cf6347b2f1\") " pod="openshift-marketplace/redhat-marketplace-54ksn" Dec 10 12:58:26 crc kubenswrapper[4921]: E1210 12:58:26.538000 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 12:58:27.037985546 +0000 UTC m=+104.254207470 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:58:26 crc kubenswrapper[4921]: I1210 12:58:26.594788 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-j2nnf" Dec 10 12:58:26 crc kubenswrapper[4921]: I1210 12:58:26.596668 4921 patch_prober.go:28] interesting pod/router-default-5444994796-xtvnd container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 10 12:58:26 crc kubenswrapper[4921]: [-]has-synced failed: reason withheld Dec 10 12:58:26 crc kubenswrapper[4921]: [+]process-running ok Dec 10 12:58:26 crc kubenswrapper[4921]: healthz check failed Dec 10 12:58:26 crc kubenswrapper[4921]: I1210 12:58:26.596712 4921 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-xtvnd" podUID="11c4ff79-c760-4e5d-8594-8dd82990dec0" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 10 12:58:26 crc kubenswrapper[4921]: I1210 12:58:26.629141 4921 patch_prober.go:28] interesting pod/apiserver-76f77b778f-qq2ff container/openshift-apiserver namespace/openshift-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok Dec 10 12:58:26 crc kubenswrapper[4921]: [+]log ok Dec 10 12:58:26 crc kubenswrapper[4921]: [+]etcd ok Dec 10 12:58:26 crc kubenswrapper[4921]: [+]poststarthook/start-apiserver-admission-initializer ok Dec 10 12:58:26 crc kubenswrapper[4921]: [+]poststarthook/generic-apiserver-start-informers ok Dec 10 12:58:26 crc kubenswrapper[4921]: [+]poststarthook/max-in-flight-filter ok Dec 10 12:58:26 crc kubenswrapper[4921]: [+]poststarthook/storage-object-count-tracker-hook ok Dec 10 12:58:26 crc kubenswrapper[4921]: [+]poststarthook/image.openshift.io-apiserver-caches ok Dec 10 12:58:26 crc kubenswrapper[4921]: [-]poststarthook/authorization.openshift.io-bootstrapclusterroles failed: reason withheld Dec 10 12:58:26 crc kubenswrapper[4921]: [-]poststarthook/authorization.openshift.io-ensurenodebootstrap-sa failed: reason withheld Dec 10 12:58:26 crc kubenswrapper[4921]: [+]poststarthook/project.openshift.io-projectcache ok Dec 10 12:58:26 crc kubenswrapper[4921]: [+]poststarthook/project.openshift.io-projectauthorizationcache ok Dec 10 12:58:26 crc kubenswrapper[4921]: [+]poststarthook/openshift.io-startinformers ok Dec 10 12:58:26 crc kubenswrapper[4921]: [+]poststarthook/openshift.io-restmapperupdater ok Dec 10 12:58:26 crc kubenswrapper[4921]: [+]poststarthook/quota.openshift.io-clusterquotamapping ok Dec 10 12:58:26 crc kubenswrapper[4921]: livez check failed Dec 10 12:58:26 crc kubenswrapper[4921]: I1210 12:58:26.629229 4921 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-apiserver/apiserver-76f77b778f-qq2ff" podUID="617cf016-593a-4d56-b104-e450cd6368ee" containerName="openshift-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 10 12:58:26 crc kubenswrapper[4921]: I1210 12:58:26.639443 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4r7pt\" (UniqueName: \"kubernetes.io/projected/13ae054d-506c-4469-baaa-c8cf6347b2f1-kube-api-access-4r7pt\") pod \"redhat-marketplace-54ksn\" (UID: \"13ae054d-506c-4469-baaa-c8cf6347b2f1\") " pod="openshift-marketplace/redhat-marketplace-54ksn" Dec 10 12:58:26 crc kubenswrapper[4921]: I1210 12:58:26.639495 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-556st\" (UID: \"ae0b7e23-ca18-4adc-aa3a-551c273d45af\") " pod="openshift-image-registry/image-registry-697d97f7c8-556st" Dec 10 12:58:26 crc kubenswrapper[4921]: I1210 12:58:26.639520 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/13ae054d-506c-4469-baaa-c8cf6347b2f1-catalog-content\") pod \"redhat-marketplace-54ksn\" (UID: \"13ae054d-506c-4469-baaa-c8cf6347b2f1\") " pod="openshift-marketplace/redhat-marketplace-54ksn" Dec 10 12:58:26 crc kubenswrapper[4921]: I1210 12:58:26.639588 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/13ae054d-506c-4469-baaa-c8cf6347b2f1-utilities\") pod \"redhat-marketplace-54ksn\" (UID: \"13ae054d-506c-4469-baaa-c8cf6347b2f1\") " pod="openshift-marketplace/redhat-marketplace-54ksn" Dec 10 12:58:26 crc kubenswrapper[4921]: I1210 12:58:26.640010 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/13ae054d-506c-4469-baaa-c8cf6347b2f1-utilities\") pod \"redhat-marketplace-54ksn\" (UID: \"13ae054d-506c-4469-baaa-c8cf6347b2f1\") " pod="openshift-marketplace/redhat-marketplace-54ksn" Dec 10 12:58:26 crc kubenswrapper[4921]: I1210 12:58:26.640513 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/13ae054d-506c-4469-baaa-c8cf6347b2f1-catalog-content\") pod \"redhat-marketplace-54ksn\" (UID: \"13ae054d-506c-4469-baaa-c8cf6347b2f1\") " pod="openshift-marketplace/redhat-marketplace-54ksn" Dec 10 12:58:26 crc kubenswrapper[4921]: E1210 12:58:26.640577 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 12:58:27.140548556 +0000 UTC m=+104.356770670 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-556st" (UID: "ae0b7e23-ca18-4adc-aa3a-551c273d45af") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:58:26 crc kubenswrapper[4921]: I1210 12:58:26.676865 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4r7pt\" (UniqueName: \"kubernetes.io/projected/13ae054d-506c-4469-baaa-c8cf6347b2f1-kube-api-access-4r7pt\") pod \"redhat-marketplace-54ksn\" (UID: \"13ae054d-506c-4469-baaa-c8cf6347b2f1\") " pod="openshift-marketplace/redhat-marketplace-54ksn" Dec 10 12:58:26 crc kubenswrapper[4921]: I1210 12:58:26.712097 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-gq8pd"] Dec 10 12:58:26 crc kubenswrapper[4921]: I1210 12:58:26.751531 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-8pb7c" Dec 10 12:58:26 crc kubenswrapper[4921]: I1210 12:58:26.753202 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-8pb7c" Dec 10 12:58:26 crc kubenswrapper[4921]: I1210 12:58:26.762650 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 12:58:26 crc kubenswrapper[4921]: E1210 12:58:26.763149 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 12:58:27.263099672 +0000 UTC m=+104.479321596 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:58:26 crc kubenswrapper[4921]: I1210 12:58:26.780738 4921 patch_prober.go:28] interesting pod/console-f9d7485db-8pb7c container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.31:8443/health\": dial tcp 10.217.0.31:8443: connect: connection refused" start-of-body= Dec 10 12:58:26 crc kubenswrapper[4921]: I1210 12:58:26.780837 4921 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-8pb7c" podUID="ad2115a5-1371-4a19-b1e8-7f93a7719a71" containerName="console" probeResult="failure" output="Get \"https://10.217.0.31:8443/health\": dial tcp 10.217.0.31:8443: connect: connection refused" Dec 10 12:58:26 crc kubenswrapper[4921]: I1210 12:58:26.842586 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-54ksn" Dec 10 12:58:26 crc kubenswrapper[4921]: I1210 12:58:26.868558 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-556st\" (UID: \"ae0b7e23-ca18-4adc-aa3a-551c273d45af\") " pod="openshift-image-registry/image-registry-697d97f7c8-556st" Dec 10 12:58:26 crc kubenswrapper[4921]: E1210 12:58:26.870591 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 12:58:27.370575689 +0000 UTC m=+104.586797613 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-556st" (UID: "ae0b7e23-ca18-4adc-aa3a-551c273d45af") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:58:26 crc kubenswrapper[4921]: I1210 12:58:26.927886 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-prv2t"] Dec 10 12:58:26 crc kubenswrapper[4921]: I1210 12:58:26.932261 4921 patch_prober.go:28] interesting pod/downloads-7954f5f757-7smdt container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.32:8080/\": dial tcp 10.217.0.32:8080: connect: connection refused" start-of-body= Dec 10 12:58:26 crc kubenswrapper[4921]: I1210 12:58:26.932342 4921 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-7smdt" podUID="23dc36ea-02d4-493e-863b-56126c624ab0" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.32:8080/\": dial tcp 10.217.0.32:8080: connect: connection refused" Dec 10 12:58:26 crc kubenswrapper[4921]: I1210 12:58:26.932848 4921 patch_prober.go:28] interesting pod/downloads-7954f5f757-7smdt container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.32:8080/\": dial tcp 10.217.0.32:8080: connect: connection refused" start-of-body= Dec 10 12:58:26 crc kubenswrapper[4921]: I1210 12:58:26.932882 4921 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-7smdt" podUID="23dc36ea-02d4-493e-863b-56126c624ab0" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.32:8080/\": dial tcp 10.217.0.32:8080: connect: connection refused" Dec 10 12:58:26 crc kubenswrapper[4921]: I1210 12:58:26.975898 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 12:58:26 crc kubenswrapper[4921]: E1210 12:58:26.976676 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 
podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 12:58:27.476662167 +0000 UTC m=+104.692884091 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:58:26 crc kubenswrapper[4921]: W1210 12:58:26.979644 4921 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod013be189_8bbe_40ef_af47_51c8fd79aa1a.slice/crio-191be80117b1c8b8f8a7fadcc533c1072b10871f0095f953c7c473d71d89cd98 WatchSource:0}: Error finding container 191be80117b1c8b8f8a7fadcc533c1072b10871f0095f953c7c473d71d89cd98: Status 404 returned error can't find the container with id 191be80117b1c8b8f8a7fadcc533c1072b10871f0095f953c7c473d71d89cd98 Dec 10 12:58:27 crc kubenswrapper[4921]: I1210 12:58:27.077654 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-556st\" (UID: \"ae0b7e23-ca18-4adc-aa3a-551c273d45af\") " pod="openshift-image-registry/image-registry-697d97f7c8-556st" Dec 10 12:58:27 crc kubenswrapper[4921]: E1210 12:58:27.078106 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 12:58:27.578091336 +0000 UTC m=+104.794313250 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-556st" (UID: "ae0b7e23-ca18-4adc-aa3a-551c273d45af") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:58:27 crc kubenswrapper[4921]: I1210 12:58:27.179379 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 12:58:27 crc kubenswrapper[4921]: E1210 12:58:27.179772 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 12:58:27.679758422 +0000 UTC m=+104.895980346 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:58:27 crc kubenswrapper[4921]: I1210 12:58:27.220573 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-wblqq"] Dec 10 12:58:27 crc kubenswrapper[4921]: W1210 12:58:27.240733 4921 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2629ed61_02c7_450a_9d32_d3277755229f.slice/crio-5bad5d29a6c558be94520dcdbc181b05c2690972d832678d1c8456ceb3a2f205 WatchSource:0}: Error finding container 5bad5d29a6c558be94520dcdbc181b05c2690972d832678d1c8456ceb3a2f205: Status 404 returned error can't find the container with id 5bad5d29a6c558be94520dcdbc181b05c2690972d832678d1c8456ceb3a2f205 Dec 10 12:58:27 crc kubenswrapper[4921]: I1210 12:58:27.281800 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-556st\" (UID: \"ae0b7e23-ca18-4adc-aa3a-551c273d45af\") " pod="openshift-image-registry/image-registry-697d97f7c8-556st" Dec 10 12:58:27 crc kubenswrapper[4921]: E1210 12:58:27.283107 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 12:58:27.783086794 +0000 UTC m=+104.999308928 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-556st" (UID: "ae0b7e23-ca18-4adc-aa3a-551c273d45af") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:58:27 crc kubenswrapper[4921]: I1210 12:58:27.296965 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-m9sh2" event={"ID":"9831d515-5a42-4f3e-98a9-aca48b1093d8","Type":"ContainerStarted","Data":"ca43fee7cd1b2ee678f176050e41f0f942826a757a68bc373a820179d99f87dd"} Dec 10 12:58:27 crc kubenswrapper[4921]: I1210 12:58:27.328705 4921 generic.go:334] "Generic (PLEG): container finished" podID="dc9190a3-c02a-48f7-ab9f-8be8951f3f37" containerID="171291850c17ea9c82f5496895df42e63387659dece0b904c3b863b20f5e446d" exitCode=0 Dec 10 12:58:27 crc kubenswrapper[4921]: I1210 12:58:27.328842 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-d7btl" event={"ID":"dc9190a3-c02a-48f7-ab9f-8be8951f3f37","Type":"ContainerDied","Data":"171291850c17ea9c82f5496895df42e63387659dece0b904c3b863b20f5e446d"} Dec 10 12:58:27 crc kubenswrapper[4921]: I1210 12:58:27.348417 4921 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 10 12:58:27 crc kubenswrapper[4921]: I1210 12:58:27.360254 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-prv2t" event={"ID":"013be189-8bbe-40ef-af47-51c8fd79aa1a","Type":"ContainerStarted","Data":"191be80117b1c8b8f8a7fadcc533c1072b10871f0095f953c7c473d71d89cd98"} Dec 10 12:58:27 crc kubenswrapper[4921]: I1210 12:58:27.386513 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 12:58:27 crc kubenswrapper[4921]: E1210 12:58:27.387165 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 12:58:27.88694728 +0000 UTC m=+105.103169204 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:58:27 crc kubenswrapper[4921]: I1210 12:58:27.387525 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-556st\" (UID: \"ae0b7e23-ca18-4adc-aa3a-551c273d45af\") " pod="openshift-image-registry/image-registry-697d97f7c8-556st" Dec 10 12:58:27 crc kubenswrapper[4921]: E1210 12:58:27.388129 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 12:58:27.888118543 +0000 UTC m=+105.104340467 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-556st" (UID: "ae0b7e23-ca18-4adc-aa3a-551c273d45af") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:58:27 crc kubenswrapper[4921]: I1210 12:58:27.397309 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gq8pd" event={"ID":"3188aa17-7df1-4bc3-a929-7e5888cb32c8","Type":"ContainerStarted","Data":"c66094c5dc4a9867db94e7bb5d27245526f31ea2327a440d15adbf35ef0c990f"} Dec 10 12:58:27 crc kubenswrapper[4921]: I1210 12:58:27.397352 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gq8pd" event={"ID":"3188aa17-7df1-4bc3-a929-7e5888cb32c8","Type":"ContainerStarted","Data":"0a83db4bf61de026cf30aa9d7a6c42ea3b071c35ba26c1db7777ee15f6f563ae"} Dec 10 12:58:27 crc kubenswrapper[4921]: I1210 12:58:27.514081 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-4gf95"] Dec 10 12:58:27 crc kubenswrapper[4921]: I1210 12:58:27.516703 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-4gf95" Dec 10 12:58:27 crc kubenswrapper[4921]: I1210 12:58:27.537574 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 10 12:58:27 crc kubenswrapper[4921]: I1210 12:58:27.543907 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 12:58:27 crc kubenswrapper[4921]: E1210 12:58:27.546172 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 12:58:28.046147995 +0000 UTC m=+105.262369909 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:58:27 crc kubenswrapper[4921]: I1210 12:58:27.584009 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-xtvnd" Dec 10 12:58:27 crc kubenswrapper[4921]: I1210 12:58:27.632016 4921 patch_prober.go:28] interesting pod/router-default-5444994796-xtvnd container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 10 12:58:27 crc kubenswrapper[4921]: [-]has-synced failed: reason withheld Dec 10 12:58:27 crc kubenswrapper[4921]: [+]process-running ok Dec 10 12:58:27 crc kubenswrapper[4921]: healthz check failed Dec 10 12:58:27 crc kubenswrapper[4921]: I1210 12:58:27.632067 4921 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-xtvnd" podUID="11c4ff79-c760-4e5d-8594-8dd82990dec0" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 10 12:58:27 crc kubenswrapper[4921]: I1210 12:58:27.648497 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-4gf95"] Dec 10 12:58:27 crc kubenswrapper[4921]: I1210 12:58:27.652835 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a9ab7ff6-04d8-45b5-93ba-12db1abe7091-utilities\") pod \"redhat-operators-4gf95\" (UID: \"a9ab7ff6-04d8-45b5-93ba-12db1abe7091\") " pod="openshift-marketplace/redhat-operators-4gf95" Dec 10 12:58:27 crc kubenswrapper[4921]: I1210 12:58:27.652879 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a9ab7ff6-04d8-45b5-93ba-12db1abe7091-catalog-content\") pod \"redhat-operators-4gf95\" (UID: \"a9ab7ff6-04d8-45b5-93ba-12db1abe7091\") " pod="openshift-marketplace/redhat-operators-4gf95" Dec 10 12:58:27 crc kubenswrapper[4921]: I1210 
12:58:27.652948 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rb9lm\" (UniqueName: \"kubernetes.io/projected/a9ab7ff6-04d8-45b5-93ba-12db1abe7091-kube-api-access-rb9lm\") pod \"redhat-operators-4gf95\" (UID: \"a9ab7ff6-04d8-45b5-93ba-12db1abe7091\") " pod="openshift-marketplace/redhat-operators-4gf95" Dec 10 12:58:27 crc kubenswrapper[4921]: I1210 12:58:27.653005 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-556st\" (UID: \"ae0b7e23-ca18-4adc-aa3a-551c273d45af\") " pod="openshift-image-registry/image-registry-697d97f7c8-556st" Dec 10 12:58:27 crc kubenswrapper[4921]: E1210 12:58:27.653418 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 12:58:28.153379595 +0000 UTC m=+105.369601519 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-556st" (UID: "ae0b7e23-ca18-4adc-aa3a-551c273d45af") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:58:27 crc kubenswrapper[4921]: I1210 12:58:27.772842 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 12:58:27 crc kubenswrapper[4921]: I1210 12:58:27.773768 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a9ab7ff6-04d8-45b5-93ba-12db1abe7091-utilities\") pod \"redhat-operators-4gf95\" (UID: \"a9ab7ff6-04d8-45b5-93ba-12db1abe7091\") " pod="openshift-marketplace/redhat-operators-4gf95" Dec 10 12:58:27 crc kubenswrapper[4921]: I1210 12:58:27.773801 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a9ab7ff6-04d8-45b5-93ba-12db1abe7091-catalog-content\") pod \"redhat-operators-4gf95\" (UID: \"a9ab7ff6-04d8-45b5-93ba-12db1abe7091\") " pod="openshift-marketplace/redhat-operators-4gf95" Dec 10 12:58:27 crc kubenswrapper[4921]: I1210 12:58:27.773865 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rb9lm\" (UniqueName: \"kubernetes.io/projected/a9ab7ff6-04d8-45b5-93ba-12db1abe7091-kube-api-access-rb9lm\") pod \"redhat-operators-4gf95\" (UID: \"a9ab7ff6-04d8-45b5-93ba-12db1abe7091\") " pod="openshift-marketplace/redhat-operators-4gf95" Dec 10 12:58:27 crc kubenswrapper[4921]: I1210 12:58:27.774784 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a9ab7ff6-04d8-45b5-93ba-12db1abe7091-catalog-content\") pod \"redhat-operators-4gf95\" (UID: \"a9ab7ff6-04d8-45b5-93ba-12db1abe7091\") " 
pod="openshift-marketplace/redhat-operators-4gf95" Dec 10 12:58:27 crc kubenswrapper[4921]: E1210 12:58:27.774897 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 12:58:28.274877812 +0000 UTC m=+105.491099736 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:58:27 crc kubenswrapper[4921]: I1210 12:58:27.775143 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a9ab7ff6-04d8-45b5-93ba-12db1abe7091-utilities\") pod \"redhat-operators-4gf95\" (UID: \"a9ab7ff6-04d8-45b5-93ba-12db1abe7091\") " pod="openshift-marketplace/redhat-operators-4gf95" Dec 10 12:58:27 crc kubenswrapper[4921]: I1210 12:58:27.818380 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rb9lm\" (UniqueName: \"kubernetes.io/projected/a9ab7ff6-04d8-45b5-93ba-12db1abe7091-kube-api-access-rb9lm\") pod \"redhat-operators-4gf95\" (UID: \"a9ab7ff6-04d8-45b5-93ba-12db1abe7091\") " pod="openshift-marketplace/redhat-operators-4gf95" Dec 10 12:58:27 crc kubenswrapper[4921]: I1210 12:58:27.825582 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-fz9kj"] Dec 10 12:58:27 crc kubenswrapper[4921]: I1210 12:58:27.826978 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-fz9kj" Dec 10 12:58:27 crc kubenswrapper[4921]: I1210 12:58:27.857905 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-tjjqr"] Dec 10 12:58:27 crc kubenswrapper[4921]: I1210 12:58:27.879362 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2410954d-1bc9-4174-9639-5717425cff64-utilities\") pod \"redhat-operators-fz9kj\" (UID: \"2410954d-1bc9-4174-9639-5717425cff64\") " pod="openshift-marketplace/redhat-operators-fz9kj" Dec 10 12:58:27 crc kubenswrapper[4921]: I1210 12:58:27.879448 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2410954d-1bc9-4174-9639-5717425cff64-catalog-content\") pod \"redhat-operators-fz9kj\" (UID: \"2410954d-1bc9-4174-9639-5717425cff64\") " pod="openshift-marketplace/redhat-operators-fz9kj" Dec 10 12:58:27 crc kubenswrapper[4921]: I1210 12:58:27.879521 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-556st\" (UID: \"ae0b7e23-ca18-4adc-aa3a-551c273d45af\") " pod="openshift-image-registry/image-registry-697d97f7c8-556st" Dec 10 12:58:27 crc kubenswrapper[4921]: I1210 12:58:27.879554 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g5qqj\" (UniqueName: \"kubernetes.io/projected/2410954d-1bc9-4174-9639-5717425cff64-kube-api-access-g5qqj\") pod \"redhat-operators-fz9kj\" (UID: \"2410954d-1bc9-4174-9639-5717425cff64\") " pod="openshift-marketplace/redhat-operators-fz9kj" Dec 10 12:58:27 crc kubenswrapper[4921]: E1210 12:58:27.879883 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 12:58:28.37987061 +0000 UTC m=+105.596092524 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-556st" (UID: "ae0b7e23-ca18-4adc-aa3a-551c273d45af") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:58:27 crc kubenswrapper[4921]: I1210 12:58:27.888952 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-fz9kj"] Dec 10 12:58:27 crc kubenswrapper[4921]: I1210 12:58:27.912280 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-4gf95" Dec 10 12:58:27 crc kubenswrapper[4921]: I1210 12:58:27.985187 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 12:58:27 crc kubenswrapper[4921]: I1210 12:58:27.985341 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g5qqj\" (UniqueName: \"kubernetes.io/projected/2410954d-1bc9-4174-9639-5717425cff64-kube-api-access-g5qqj\") pod \"redhat-operators-fz9kj\" (UID: \"2410954d-1bc9-4174-9639-5717425cff64\") " pod="openshift-marketplace/redhat-operators-fz9kj" Dec 10 12:58:27 crc kubenswrapper[4921]: I1210 12:58:27.985382 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2410954d-1bc9-4174-9639-5717425cff64-utilities\") pod \"redhat-operators-fz9kj\" (UID: \"2410954d-1bc9-4174-9639-5717425cff64\") " pod="openshift-marketplace/redhat-operators-fz9kj" Dec 10 12:58:27 crc kubenswrapper[4921]: I1210 12:58:27.985435 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2410954d-1bc9-4174-9639-5717425cff64-catalog-content\") pod \"redhat-operators-fz9kj\" (UID: \"2410954d-1bc9-4174-9639-5717425cff64\") " pod="openshift-marketplace/redhat-operators-fz9kj" Dec 10 12:58:27 crc kubenswrapper[4921]: I1210 12:58:27.985834 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2410954d-1bc9-4174-9639-5717425cff64-catalog-content\") pod \"redhat-operators-fz9kj\" (UID: \"2410954d-1bc9-4174-9639-5717425cff64\") " pod="openshift-marketplace/redhat-operators-fz9kj" Dec 10 12:58:27 crc kubenswrapper[4921]: E1210 12:58:27.985900 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 12:58:28.485884526 +0000 UTC m=+105.702106450 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:58:27 crc kubenswrapper[4921]: I1210 12:58:27.986317 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2410954d-1bc9-4174-9639-5717425cff64-utilities\") pod \"redhat-operators-fz9kj\" (UID: \"2410954d-1bc9-4174-9639-5717425cff64\") " pod="openshift-marketplace/redhat-operators-fz9kj" Dec 10 12:58:28 crc kubenswrapper[4921]: I1210 12:58:28.024657 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-j2nnf"] Dec 10 12:58:28 crc kubenswrapper[4921]: I1210 12:58:28.042238 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g5qqj\" (UniqueName: \"kubernetes.io/projected/2410954d-1bc9-4174-9639-5717425cff64-kube-api-access-g5qqj\") pod \"redhat-operators-fz9kj\" (UID: \"2410954d-1bc9-4174-9639-5717425cff64\") " pod="openshift-marketplace/redhat-operators-fz9kj" Dec 10 12:58:28 crc kubenswrapper[4921]: I1210 12:58:28.083220 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-54ksn"] Dec 10 12:58:28 crc kubenswrapper[4921]: I1210 12:58:28.086842 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-556st\" (UID: \"ae0b7e23-ca18-4adc-aa3a-551c273d45af\") " pod="openshift-image-registry/image-registry-697d97f7c8-556st" Dec 10 12:58:28 crc kubenswrapper[4921]: E1210 12:58:28.087227 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 12:58:28.587205382 +0000 UTC m=+105.803427506 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-556st" (UID: "ae0b7e23-ca18-4adc-aa3a-551c273d45af") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:58:28 crc kubenswrapper[4921]: I1210 12:58:28.188174 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 12:58:28 crc kubenswrapper[4921]: E1210 12:58:28.188489 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-12-10 12:58:28.688475476 +0000 UTC m=+105.904697390 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:58:28 crc kubenswrapper[4921]: I1210 12:58:28.240821 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-fz9kj" Dec 10 12:58:28 crc kubenswrapper[4921]: I1210 12:58:28.290534 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-556st\" (UID: \"ae0b7e23-ca18-4adc-aa3a-551c273d45af\") " pod="openshift-image-registry/image-registry-697d97f7c8-556st" Dec 10 12:58:28 crc kubenswrapper[4921]: E1210 12:58:28.291140 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 12:58:28.791121959 +0000 UTC m=+106.007343883 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-556st" (UID: "ae0b7e23-ca18-4adc-aa3a-551c273d45af") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:58:28 crc kubenswrapper[4921]: I1210 12:58:28.394985 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 12:58:28 crc kubenswrapper[4921]: E1210 12:58:28.395190 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 12:58:28.89516059 +0000 UTC m=+106.111382514 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:58:28 crc kubenswrapper[4921]: I1210 12:58:28.395453 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-556st\" (UID: \"ae0b7e23-ca18-4adc-aa3a-551c273d45af\") " pod="openshift-image-registry/image-registry-697d97f7c8-556st" Dec 10 12:58:28 crc kubenswrapper[4921]: E1210 12:58:28.395827 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 12:58:28.895810809 +0000 UTC m=+106.112032733 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-556st" (UID: "ae0b7e23-ca18-4adc-aa3a-551c273d45af") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:58:28 crc kubenswrapper[4921]: I1210 12:58:28.439618 4921 generic.go:334] "Generic (PLEG): container finished" podID="013be189-8bbe-40ef-af47-51c8fd79aa1a" containerID="6d9e4e06c9128374c4a14638b03454e0fb9bbf24c3629bf4f1c4f9f332867a64" exitCode=0 Dec 10 12:58:28 crc kubenswrapper[4921]: I1210 12:58:28.440416 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-prv2t" event={"ID":"013be189-8bbe-40ef-af47-51c8fd79aa1a","Type":"ContainerDied","Data":"6d9e4e06c9128374c4a14638b03454e0fb9bbf24c3629bf4f1c4f9f332867a64"} Dec 10 12:58:28 crc kubenswrapper[4921]: I1210 12:58:28.493430 4921 generic.go:334] "Generic (PLEG): container finished" podID="3188aa17-7df1-4bc3-a929-7e5888cb32c8" containerID="c66094c5dc4a9867db94e7bb5d27245526f31ea2327a440d15adbf35ef0c990f" exitCode=0 Dec 10 12:58:28 crc kubenswrapper[4921]: I1210 12:58:28.493591 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gq8pd" event={"ID":"3188aa17-7df1-4bc3-a929-7e5888cb32c8","Type":"ContainerDied","Data":"c66094c5dc4a9867db94e7bb5d27245526f31ea2327a440d15adbf35ef0c990f"} Dec 10 12:58:28 crc kubenswrapper[4921]: I1210 12:58:28.496034 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 12:58:28 crc kubenswrapper[4921]: E1210 12:58:28.496956 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-12-10 12:58:28.996942119 +0000 UTC m=+106.213164043 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:58:28 crc kubenswrapper[4921]: I1210 12:58:28.512531 4921 generic.go:334] "Generic (PLEG): container finished" podID="2629ed61-02c7-450a-9d32-d3277755229f" containerID="c71e73fc77ba41a2d5837a572875003714c95530b5a5cd96c68dfefbe632ed8f" exitCode=0 Dec 10 12:58:28 crc kubenswrapper[4921]: I1210 12:58:28.512599 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wblqq" event={"ID":"2629ed61-02c7-450a-9d32-d3277755229f","Type":"ContainerDied","Data":"c71e73fc77ba41a2d5837a572875003714c95530b5a5cd96c68dfefbe632ed8f"} Dec 10 12:58:28 crc kubenswrapper[4921]: I1210 12:58:28.512626 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wblqq" event={"ID":"2629ed61-02c7-450a-9d32-d3277755229f","Type":"ContainerStarted","Data":"5bad5d29a6c558be94520dcdbc181b05c2690972d832678d1c8456ceb3a2f205"} Dec 10 12:58:28 crc kubenswrapper[4921]: I1210 12:58:28.554224 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-m9sh2" event={"ID":"9831d515-5a42-4f3e-98a9-aca48b1093d8","Type":"ContainerStarted","Data":"6ed70c517d3749ef09b52181fb64653b87c8d04ff64666acc4d659d0e3b71156"} Dec 10 12:58:28 crc kubenswrapper[4921]: I1210 12:58:28.561934 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-j2nnf" event={"ID":"9cc656f0-ce36-474b-9fa3-1ce9f43675a4","Type":"ContainerStarted","Data":"2ad683a5a9e45682b3f8ac4a833dff46d68304b28d6c91631bf27f505aeba8ef"} Dec 10 12:58:28 crc kubenswrapper[4921]: I1210 12:58:28.567466 4921 generic.go:334] "Generic (PLEG): container finished" podID="18c18bb4-4d43-4f8e-aa0d-808598954883" containerID="d5a801efcd5959c4103de70e313cb4c41f9f913806aa87c4319b7fd8e692ee4c" exitCode=0 Dec 10 12:58:28 crc kubenswrapper[4921]: I1210 12:58:28.567539 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tjjqr" event={"ID":"18c18bb4-4d43-4f8e-aa0d-808598954883","Type":"ContainerDied","Data":"d5a801efcd5959c4103de70e313cb4c41f9f913806aa87c4319b7fd8e692ee4c"} Dec 10 12:58:28 crc kubenswrapper[4921]: I1210 12:58:28.567565 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tjjqr" event={"ID":"18c18bb4-4d43-4f8e-aa0d-808598954883","Type":"ContainerStarted","Data":"f85f34c4266a88c004655984d011866b890d2a67412337fc61eaffc00eb926dd"} Dec 10 12:58:28 crc kubenswrapper[4921]: I1210 12:58:28.601632 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-556st\" (UID: \"ae0b7e23-ca18-4adc-aa3a-551c273d45af\") " pod="openshift-image-registry/image-registry-697d97f7c8-556st" Dec 10 12:58:28 crc kubenswrapper[4921]: E1210 12:58:28.602499 4921 
nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 12:58:29.102486293 +0000 UTC m=+106.318708217 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-556st" (UID: "ae0b7e23-ca18-4adc-aa3a-551c273d45af") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:58:28 crc kubenswrapper[4921]: I1210 12:58:28.612353 4921 generic.go:334] "Generic (PLEG): container finished" podID="13ae054d-506c-4469-baaa-c8cf6347b2f1" containerID="3acc9830db4fd617b204c4002f866571a43197b2efc219c62f5a6d575fc86432" exitCode=0 Dec 10 12:58:28 crc kubenswrapper[4921]: I1210 12:58:28.612428 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-54ksn" event={"ID":"13ae054d-506c-4469-baaa-c8cf6347b2f1","Type":"ContainerDied","Data":"3acc9830db4fd617b204c4002f866571a43197b2efc219c62f5a6d575fc86432"} Dec 10 12:58:28 crc kubenswrapper[4921]: I1210 12:58:28.612458 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-54ksn" event={"ID":"13ae054d-506c-4469-baaa-c8cf6347b2f1","Type":"ContainerStarted","Data":"9801705926e9a0e5fc3930a481a46ed8019781b8ba14ceb1a09d198f3c3de5d7"} Dec 10 12:58:28 crc kubenswrapper[4921]: I1210 12:58:28.632718 4921 patch_prober.go:28] interesting pod/router-default-5444994796-xtvnd container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 10 12:58:28 crc kubenswrapper[4921]: [-]has-synced failed: reason withheld Dec 10 12:58:28 crc kubenswrapper[4921]: [+]process-running ok Dec 10 12:58:28 crc kubenswrapper[4921]: healthz check failed Dec 10 12:58:28 crc kubenswrapper[4921]: I1210 12:58:28.632766 4921 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-xtvnd" podUID="11c4ff79-c760-4e5d-8594-8dd82990dec0" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 10 12:58:28 crc kubenswrapper[4921]: I1210 12:58:28.687565 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-4gf95"] Dec 10 12:58:28 crc kubenswrapper[4921]: I1210 12:58:28.703492 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 12:58:28 crc kubenswrapper[4921]: E1210 12:58:28.704554 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 12:58:29.204533459 +0000 UTC m=+106.420755383 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:58:28 crc kubenswrapper[4921]: I1210 12:58:28.806186 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-556st\" (UID: \"ae0b7e23-ca18-4adc-aa3a-551c273d45af\") " pod="openshift-image-registry/image-registry-697d97f7c8-556st" Dec 10 12:58:28 crc kubenswrapper[4921]: E1210 12:58:28.806776 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 12:58:29.30675882 +0000 UTC m=+106.522980744 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-556st" (UID: "ae0b7e23-ca18-4adc-aa3a-551c273d45af") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:58:28 crc kubenswrapper[4921]: I1210 12:58:28.907492 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 12:58:28 crc kubenswrapper[4921]: E1210 12:58:28.908372 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 12:58:29.408354583 +0000 UTC m=+106.624576507 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:58:28 crc kubenswrapper[4921]: I1210 12:58:28.920652 4921 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Dec 10 12:58:28 crc kubenswrapper[4921]: I1210 12:58:28.980688 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-fz9kj"] Dec 10 12:58:29 crc kubenswrapper[4921]: I1210 12:58:29.013065 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-556st\" (UID: \"ae0b7e23-ca18-4adc-aa3a-551c273d45af\") " pod="openshift-image-registry/image-registry-697d97f7c8-556st" Dec 10 12:58:29 crc kubenswrapper[4921]: E1210 12:58:29.014634 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 12:58:29.514611946 +0000 UTC m=+106.730833870 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-556st" (UID: "ae0b7e23-ca18-4adc-aa3a-551c273d45af") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:58:29 crc kubenswrapper[4921]: I1210 12:58:29.115938 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 12:58:29 crc kubenswrapper[4921]: E1210 12:58:29.116345 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 12:58:29.616297532 +0000 UTC m=+106.832519456 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:58:29 crc kubenswrapper[4921]: I1210 12:58:29.218371 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-556st\" (UID: \"ae0b7e23-ca18-4adc-aa3a-551c273d45af\") " pod="openshift-image-registry/image-registry-697d97f7c8-556st" Dec 10 12:58:29 crc kubenswrapper[4921]: E1210 12:58:29.218695 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 12:58:29.718678298 +0000 UTC m=+106.934900212 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-556st" (UID: "ae0b7e23-ca18-4adc-aa3a-551c273d45af") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:58:29 crc kubenswrapper[4921]: I1210 12:58:29.319767 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 12:58:29 crc kubenswrapper[4921]: E1210 12:58:29.320040 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 12:58:29.819999454 +0000 UTC m=+107.036221378 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:58:29 crc kubenswrapper[4921]: I1210 12:58:29.320314 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-556st\" (UID: \"ae0b7e23-ca18-4adc-aa3a-551c273d45af\") " pod="openshift-image-registry/image-registry-697d97f7c8-556st" Dec 10 12:58:29 crc kubenswrapper[4921]: E1210 12:58:29.320695 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 12:58:29.820687833 +0000 UTC m=+107.036909757 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-556st" (UID: "ae0b7e23-ca18-4adc-aa3a-551c273d45af") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 12:58:29 crc kubenswrapper[4921]: I1210 12:58:29.387358 4921 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2025-12-10T12:58:28.920697206Z","Handler":null,"Name":""} Dec 10 12:58:29 crc kubenswrapper[4921]: I1210 12:58:29.391650 4921 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0 Dec 10 12:58:29 crc kubenswrapper[4921]: I1210 12:58:29.391707 4921 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock Dec 10 12:58:29 crc kubenswrapper[4921]: I1210 12:58:29.422747 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 12:58:29 crc kubenswrapper[4921]: I1210 12:58:29.475382 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". 
PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 10 12:58:29 crc kubenswrapper[4921]: I1210 12:58:29.528523 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-556st\" (UID: \"ae0b7e23-ca18-4adc-aa3a-551c273d45af\") " pod="openshift-image-registry/image-registry-697d97f7c8-556st" Dec 10 12:58:29 crc kubenswrapper[4921]: I1210 12:58:29.533832 4921 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 10 12:58:29 crc kubenswrapper[4921]: I1210 12:58:29.533880 4921 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-556st\" (UID: \"ae0b7e23-ca18-4adc-aa3a-551c273d45af\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-556st" Dec 10 12:58:29 crc kubenswrapper[4921]: I1210 12:58:29.551175 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-dhxg9" Dec 10 12:58:29 crc kubenswrapper[4921]: I1210 12:58:29.578643 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-556st\" (UID: \"ae0b7e23-ca18-4adc-aa3a-551c273d45af\") " pod="openshift-image-registry/image-registry-697d97f7c8-556st" Dec 10 12:58:29 crc kubenswrapper[4921]: I1210 12:58:29.589879 4921 patch_prober.go:28] interesting pod/router-default-5444994796-xtvnd container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 10 12:58:29 crc kubenswrapper[4921]: [-]has-synced failed: reason withheld Dec 10 12:58:29 crc kubenswrapper[4921]: [+]process-running ok Dec 10 12:58:29 crc kubenswrapper[4921]: healthz check failed Dec 10 12:58:29 crc kubenswrapper[4921]: I1210 12:58:29.589961 4921 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-xtvnd" podUID="11c4ff79-c760-4e5d-8594-8dd82990dec0" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 10 12:58:29 crc kubenswrapper[4921]: I1210 12:58:29.623704 4921 util.go:30] "No sandbox for pod can be found. 
Dec 10 12:58:29 crc kubenswrapper[4921]: I1210 12:58:29.689952 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-m9sh2" event={"ID":"9831d515-5a42-4f3e-98a9-aca48b1093d8","Type":"ContainerStarted","Data":"6c9b145152694099f7f79932e78edd02520eafb393027aaf9f74168ea25998ec"}
Dec 10 12:58:29 crc kubenswrapper[4921]: I1210 12:58:29.703450 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-j2nnf" event={"ID":"9cc656f0-ce36-474b-9fa3-1ce9f43675a4","Type":"ContainerStarted","Data":"844e13456d5ae2234a9485d85f3cecb5eae8b9e0ae213fd0a9c587ddd322c93c"}
Dec 10 12:58:29 crc kubenswrapper[4921]: I1210 12:58:29.703499 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-j2nnf" event={"ID":"9cc656f0-ce36-474b-9fa3-1ce9f43675a4","Type":"ContainerStarted","Data":"85f26843dad8d394dfb9b17f0be11422df0feaaaf2fa58dfc04a2fb2e03f04d9"}
Dec 10 12:58:29 crc kubenswrapper[4921]: I1210 12:58:29.709356 4921 generic.go:334] "Generic (PLEG): container finished" podID="a9ab7ff6-04d8-45b5-93ba-12db1abe7091" containerID="27285eae6a970dfbb00261f4bc36b0a131cb4bd179aa769b01dd1c8ec2ac5c23" exitCode=0
Dec 10 12:58:29 crc kubenswrapper[4921]: I1210 12:58:29.709453 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4gf95" event={"ID":"a9ab7ff6-04d8-45b5-93ba-12db1abe7091","Type":"ContainerDied","Data":"27285eae6a970dfbb00261f4bc36b0a131cb4bd179aa769b01dd1c8ec2ac5c23"}
Dec 10 12:58:29 crc kubenswrapper[4921]: I1210 12:58:29.709491 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4gf95" event={"ID":"a9ab7ff6-04d8-45b5-93ba-12db1abe7091","Type":"ContainerStarted","Data":"af69f731d477ad0a290d0bd6300d39e398ff4b5bf1858def9faae0a669384342"}
Dec 10 12:58:29 crc kubenswrapper[4921]: I1210 12:58:29.715350 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-m9sh2" podStartSLOduration=15.715332361 podStartE2EDuration="15.715332361s" podCreationTimestamp="2025-12-10 12:58:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 12:58:29.714633311 +0000 UTC m=+106.930855235" watchObservedRunningTime="2025-12-10 12:58:29.715332361 +0000 UTC m=+106.931554285"
Dec 10 12:58:29 crc kubenswrapper[4921]: I1210 12:58:29.725425 4921 generic.go:334] "Generic (PLEG): container finished" podID="2410954d-1bc9-4174-9639-5717425cff64" containerID="df7a95fbedcf2525691a08c277048b8ae21104ecb97355c9cd4859f7837ca6eb" exitCode=0
Dec 10 12:58:29 crc kubenswrapper[4921]: I1210 12:58:29.725484 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fz9kj" event={"ID":"2410954d-1bc9-4174-9639-5717425cff64","Type":"ContainerDied","Data":"df7a95fbedcf2525691a08c277048b8ae21104ecb97355c9cd4859f7837ca6eb"}
Dec 10 12:58:29 crc kubenswrapper[4921]: I1210 12:58:29.725520 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fz9kj" event={"ID":"2410954d-1bc9-4174-9639-5717425cff64","Type":"ContainerStarted","Data":"5e2544327283ce38d788a5e2df45cb7f016d7770dd10655ffd09615c5c487a6e"}
Dec 10 12:58:29 crc kubenswrapper[4921]: I1210 12:58:29.782447 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-j2nnf" podStartSLOduration=82.782419795 podStartE2EDuration="1m22.782419795s" podCreationTimestamp="2025-12-10 12:57:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 12:58:29.778091715 +0000 UTC m=+106.994313659" watchObservedRunningTime="2025-12-10 12:58:29.782419795 +0000 UTC m=+106.998641729"
Dec 10 12:58:30 crc kubenswrapper[4921]: I1210 12:58:30.255539 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-556st"]
Dec 10 12:58:30 crc kubenswrapper[4921]: W1210 12:58:30.337729 4921 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podae0b7e23_ca18_4adc_aa3a_551c273d45af.slice/crio-b3dff19ef12d78ee380f2f4bafff2fde5db273d6d124ba07cbc9a3d2014d5647 WatchSource:0}: Error finding container b3dff19ef12d78ee380f2f4bafff2fde5db273d6d124ba07cbc9a3d2014d5647: Status 404 returned error can't find the container with id b3dff19ef12d78ee380f2f4bafff2fde5db273d6d124ba07cbc9a3d2014d5647
Dec 10 12:58:30 crc kubenswrapper[4921]: I1210 12:58:30.580486 4921 patch_prober.go:28] interesting pod/router-default-5444994796-xtvnd container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 10 12:58:30 crc kubenswrapper[4921]: [-]has-synced failed: reason withheld
Dec 10 12:58:30 crc kubenswrapper[4921]: [+]process-running ok
Dec 10 12:58:30 crc kubenswrapper[4921]: healthz check failed
Dec 10 12:58:30 crc kubenswrapper[4921]: I1210 12:58:30.580593 4921 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-xtvnd" podUID="11c4ff79-c760-4e5d-8594-8dd82990dec0" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 10 12:58:30 crc kubenswrapper[4921]: I1210 12:58:30.611563 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"]
Dec 10 12:58:30 crc kubenswrapper[4921]: I1210 12:58:30.612650 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 10 12:58:30 crc kubenswrapper[4921]: I1210 12:58:30.618617 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt"
Dec 10 12:58:30 crc kubenswrapper[4921]: I1210 12:58:30.619145 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n"
Dec 10 12:58:30 crc kubenswrapper[4921]: I1210 12:58:30.637701 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"]
Dec 10 12:58:30 crc kubenswrapper[4921]: I1210 12:58:30.659144 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f294181c-db48-4209-92a9-73d5045dad0f-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"f294181c-db48-4209-92a9-73d5045dad0f\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 10 12:58:30 crc kubenswrapper[4921]: I1210 12:58:30.659196 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f294181c-db48-4209-92a9-73d5045dad0f-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"f294181c-db48-4209-92a9-73d5045dad0f\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 10 12:58:30 crc kubenswrapper[4921]: I1210 12:58:30.764858 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f294181c-db48-4209-92a9-73d5045dad0f-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"f294181c-db48-4209-92a9-73d5045dad0f\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 10 12:58:30 crc kubenswrapper[4921]: I1210 12:58:30.765377 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f294181c-db48-4209-92a9-73d5045dad0f-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"f294181c-db48-4209-92a9-73d5045dad0f\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 10 12:58:30 crc kubenswrapper[4921]: I1210 12:58:30.765506 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f294181c-db48-4209-92a9-73d5045dad0f-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"f294181c-db48-4209-92a9-73d5045dad0f\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 10 12:58:30 crc kubenswrapper[4921]: I1210 12:58:30.800429 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f294181c-db48-4209-92a9-73d5045dad0f-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"f294181c-db48-4209-92a9-73d5045dad0f\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 10 12:58:30 crc kubenswrapper[4921]: I1210 12:58:30.843283 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-556st" event={"ID":"ae0b7e23-ca18-4adc-aa3a-551c273d45af","Type":"ContainerStarted","Data":"b3dff19ef12d78ee380f2f4bafff2fde5db273d6d124ba07cbc9a3d2014d5647"}
Dec 10 12:58:30 crc kubenswrapper[4921]: I1210 12:58:30.972148 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 10 12:58:31 crc kubenswrapper[4921]: I1210 12:58:31.227113 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes"
Dec 10 12:58:31 crc kubenswrapper[4921]: I1210 12:58:31.490488 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-qq2ff"
Dec 10 12:58:31 crc kubenswrapper[4921]: I1210 12:58:31.496583 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-qq2ff"
Dec 10 12:58:31 crc kubenswrapper[4921]: I1210 12:58:31.592796 4921 patch_prober.go:28] interesting pod/router-default-5444994796-xtvnd container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 10 12:58:31 crc kubenswrapper[4921]: [-]has-synced failed: reason withheld
Dec 10 12:58:31 crc kubenswrapper[4921]: [+]process-running ok
Dec 10 12:58:31 crc kubenswrapper[4921]: healthz check failed
Dec 10 12:58:31 crc kubenswrapper[4921]: I1210 12:58:31.592871 4921 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-xtvnd" podUID="11c4ff79-c760-4e5d-8594-8dd82990dec0" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 10 12:58:31 crc kubenswrapper[4921]: I1210 12:58:31.891531 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-556st" event={"ID":"ae0b7e23-ca18-4adc-aa3a-551c273d45af","Type":"ContainerStarted","Data":"d500b473e93ca1215c34f918306aaa6bc11ebb4e159112579321453bebbfbb98"}
Dec 10 12:58:31 crc kubenswrapper[4921]: I1210 12:58:31.891649 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-556st"
Dec 10 12:58:31 crc kubenswrapper[4921]: I1210 12:58:31.921158 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-556st" podStartSLOduration=84.921134953 podStartE2EDuration="1m24.921134953s" podCreationTimestamp="2025-12-10 12:57:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 12:58:31.918004876 +0000 UTC m=+109.134226800" watchObservedRunningTime="2025-12-10 12:58:31.921134953 +0000 UTC m=+109.137356877"
Dec 10 12:58:31 crc kubenswrapper[4921]: I1210 12:58:31.965534 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"]
Dec 10 12:58:32 crc kubenswrapper[4921]: W1210 12:58:32.038972 4921 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-podf294181c_db48_4209_92a9_73d5045dad0f.slice/crio-d41c0550f00ec69af5abd9f3977485b1dfb781130c6f5f99a9fc72c6b4d430a3 WatchSource:0}: Error finding container d41c0550f00ec69af5abd9f3977485b1dfb781130c6f5f99a9fc72c6b4d430a3: Status 404 returned error can't find the container with id d41c0550f00ec69af5abd9f3977485b1dfb781130c6f5f99a9fc72c6b4d430a3
Dec 10 12:58:32 crc kubenswrapper[4921]: I1210 12:58:32.580839 4921 patch_prober.go:28] interesting pod/router-default-5444994796-xtvnd container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 10 12:58:32 crc kubenswrapper[4921]: [-]has-synced failed: reason withheld
Dec 10 12:58:32 crc kubenswrapper[4921]: [+]process-running ok
Dec 10 12:58:32 crc kubenswrapper[4921]: healthz check failed
Dec 10 12:58:32 crc kubenswrapper[4921]: I1210 12:58:32.580890 4921 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-xtvnd" podUID="11c4ff79-c760-4e5d-8594-8dd82990dec0" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 10 12:58:32 crc kubenswrapper[4921]: I1210 12:58:32.627193 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-sh5l7"
Dec 10 12:58:32 crc kubenswrapper[4921]: I1210 12:58:32.924002 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"f294181c-db48-4209-92a9-73d5045dad0f","Type":"ContainerStarted","Data":"d41c0550f00ec69af5abd9f3977485b1dfb781130c6f5f99a9fc72c6b4d430a3"}
Dec 10 12:58:33 crc kubenswrapper[4921]: I1210 12:58:33.025073 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"]
Dec 10 12:58:33 crc kubenswrapper[4921]: I1210 12:58:33.029675 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc"
Dec 10 12:58:33 crc kubenswrapper[4921]: I1210 12:58:33.033069 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"]
Dec 10 12:58:33 crc kubenswrapper[4921]: I1210 12:58:33.041582 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt"
Dec 10 12:58:33 crc kubenswrapper[4921]: I1210 12:58:33.041909 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n"
Dec 10 12:58:33 crc kubenswrapper[4921]: I1210 12:58:33.153105 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/b58499bd-bb38-4162-ba45-38d12afe6096-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"b58499bd-bb38-4162-ba45-38d12afe6096\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Dec 10 12:58:33 crc kubenswrapper[4921]: I1210 12:58:33.153231 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b58499bd-bb38-4162-ba45-38d12afe6096-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"b58499bd-bb38-4162-ba45-38d12afe6096\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Dec 10 12:58:33 crc kubenswrapper[4921]: I1210 12:58:33.254625 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b58499bd-bb38-4162-ba45-38d12afe6096-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"b58499bd-bb38-4162-ba45-38d12afe6096\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Dec 10 12:58:33 crc kubenswrapper[4921]: I1210 12:58:33.254799 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/b58499bd-bb38-4162-ba45-38d12afe6096-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"b58499bd-bb38-4162-ba45-38d12afe6096\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Dec 10 12:58:33 crc kubenswrapper[4921]: I1210 12:58:33.254924 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/b58499bd-bb38-4162-ba45-38d12afe6096-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"b58499bd-bb38-4162-ba45-38d12afe6096\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Dec 10 12:58:33 crc kubenswrapper[4921]: I1210 12:58:33.307969 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b58499bd-bb38-4162-ba45-38d12afe6096-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"b58499bd-bb38-4162-ba45-38d12afe6096\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Dec 10 12:58:33 crc kubenswrapper[4921]: I1210 12:58:33.404181 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc"
Dec 10 12:58:33 crc kubenswrapper[4921]: I1210 12:58:33.588549 4921 patch_prober.go:28] interesting pod/router-default-5444994796-xtvnd container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 10 12:58:33 crc kubenswrapper[4921]: [-]has-synced failed: reason withheld
Dec 10 12:58:33 crc kubenswrapper[4921]: [+]process-running ok
Dec 10 12:58:33 crc kubenswrapper[4921]: healthz check failed
Dec 10 12:58:33 crc kubenswrapper[4921]: I1210 12:58:33.588678 4921 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-xtvnd" podUID="11c4ff79-c760-4e5d-8594-8dd82990dec0" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 10 12:58:34 crc kubenswrapper[4921]: I1210 12:58:34.035610 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"f294181c-db48-4209-92a9-73d5045dad0f","Type":"ContainerStarted","Data":"446a38e862e86c511fd7695ba4d86613ea785b1c9aa5c7ad496c2094e354e438"}
Dec 10 12:58:34 crc kubenswrapper[4921]: I1210 12:58:34.069708 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/revision-pruner-9-crc" podStartSLOduration=4.069685465 podStartE2EDuration="4.069685465s" podCreationTimestamp="2025-12-10 12:58:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 12:58:34.063015539 +0000 UTC m=+111.279237473" watchObservedRunningTime="2025-12-10 12:58:34.069685465 +0000 UTC m=+111.285907389"
Dec 10 12:58:34 crc kubenswrapper[4921]: I1210 12:58:34.195791 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"]
Dec 10 12:58:34 crc kubenswrapper[4921]: I1210 12:58:34.584674 4921 patch_prober.go:28] interesting pod/router-default-5444994796-xtvnd container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 10 12:58:34 crc kubenswrapper[4921]: [-]has-synced failed: reason withheld
Dec 10 12:58:34 crc kubenswrapper[4921]: [+]process-running ok
Dec 10 12:58:34 crc kubenswrapper[4921]: healthz check failed
Dec 10 12:58:34 crc kubenswrapper[4921]: I1210 12:58:34.585051 4921 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-xtvnd" podUID="11c4ff79-c760-4e5d-8594-8dd82990dec0" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-xtvnd" podUID="11c4ff79-c760-4e5d-8594-8dd82990dec0" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 10 12:58:35 crc kubenswrapper[4921]: I1210 12:58:35.043517 4921 generic.go:334] "Generic (PLEG): container finished" podID="f294181c-db48-4209-92a9-73d5045dad0f" containerID="446a38e862e86c511fd7695ba4d86613ea785b1c9aa5c7ad496c2094e354e438" exitCode=0 Dec 10 12:58:35 crc kubenswrapper[4921]: I1210 12:58:35.043587 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"f294181c-db48-4209-92a9-73d5045dad0f","Type":"ContainerDied","Data":"446a38e862e86c511fd7695ba4d86613ea785b1c9aa5c7ad496c2094e354e438"} Dec 10 12:58:35 crc kubenswrapper[4921]: I1210 12:58:35.046036 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"b58499bd-bb38-4162-ba45-38d12afe6096","Type":"ContainerStarted","Data":"e10ef9597a78a637c43c8f83c0dab3a60eea6ac46bcd709b4a0bed18eb18dd2e"} Dec 10 12:58:35 crc kubenswrapper[4921]: I1210 12:58:35.580852 4921 patch_prober.go:28] interesting pod/router-default-5444994796-xtvnd container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 10 12:58:35 crc kubenswrapper[4921]: [-]has-synced failed: reason withheld Dec 10 12:58:35 crc kubenswrapper[4921]: [+]process-running ok Dec 10 12:58:35 crc kubenswrapper[4921]: healthz check failed Dec 10 12:58:35 crc kubenswrapper[4921]: I1210 12:58:35.580909 4921 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-xtvnd" podUID="11c4ff79-c760-4e5d-8594-8dd82990dec0" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 10 12:58:36 crc kubenswrapper[4921]: I1210 12:58:36.119530 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"b58499bd-bb38-4162-ba45-38d12afe6096","Type":"ContainerStarted","Data":"7c1d37474454bcec49adb1d0426b3750412ec75b4c8d23d13d020ffef0244fc9"} Dec 10 12:58:36 crc kubenswrapper[4921]: I1210 12:58:36.148717 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-8-crc" podStartSLOduration=3.148688114 podStartE2EDuration="3.148688114s" podCreationTimestamp="2025-12-10 12:58:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 12:58:36.142301227 +0000 UTC m=+113.358523151" watchObservedRunningTime="2025-12-10 12:58:36.148688114 +0000 UTC m=+113.364910038" Dec 10 12:58:36 crc kubenswrapper[4921]: I1210 12:58:36.485952 4921 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 10 12:58:36 crc kubenswrapper[4921]: I1210 12:58:36.536292 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f294181c-db48-4209-92a9-73d5045dad0f-kube-api-access\") pod \"f294181c-db48-4209-92a9-73d5045dad0f\" (UID: \"f294181c-db48-4209-92a9-73d5045dad0f\") " Dec 10 12:58:36 crc kubenswrapper[4921]: I1210 12:58:36.536430 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f294181c-db48-4209-92a9-73d5045dad0f-kubelet-dir\") pod \"f294181c-db48-4209-92a9-73d5045dad0f\" (UID: \"f294181c-db48-4209-92a9-73d5045dad0f\") " Dec 10 12:58:36 crc kubenswrapper[4921]: I1210 12:58:36.536701 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f294181c-db48-4209-92a9-73d5045dad0f-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "f294181c-db48-4209-92a9-73d5045dad0f" (UID: "f294181c-db48-4209-92a9-73d5045dad0f"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 12:58:36 crc kubenswrapper[4921]: I1210 12:58:36.547671 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f294181c-db48-4209-92a9-73d5045dad0f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "f294181c-db48-4209-92a9-73d5045dad0f" (UID: "f294181c-db48-4209-92a9-73d5045dad0f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 12:58:36 crc kubenswrapper[4921]: I1210 12:58:36.581654 4921 patch_prober.go:28] interesting pod/router-default-5444994796-xtvnd container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 10 12:58:36 crc kubenswrapper[4921]: [-]has-synced failed: reason withheld Dec 10 12:58:36 crc kubenswrapper[4921]: [+]process-running ok Dec 10 12:58:36 crc kubenswrapper[4921]: healthz check failed Dec 10 12:58:36 crc kubenswrapper[4921]: I1210 12:58:36.581700 4921 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-xtvnd" podUID="11c4ff79-c760-4e5d-8594-8dd82990dec0" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 10 12:58:36 crc kubenswrapper[4921]: I1210 12:58:36.638279 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f294181c-db48-4209-92a9-73d5045dad0f-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 10 12:58:36 crc kubenswrapper[4921]: I1210 12:58:36.638311 4921 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f294181c-db48-4209-92a9-73d5045dad0f-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 10 12:58:36 crc kubenswrapper[4921]: I1210 12:58:36.748098 4921 patch_prober.go:28] interesting pod/console-f9d7485db-8pb7c container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.31:8443/health\": dial tcp 10.217.0.31:8443: connect: connection refused" start-of-body= Dec 10 12:58:36 crc kubenswrapper[4921]: I1210 12:58:36.748172 4921 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-8pb7c" 
podUID="ad2115a5-1371-4a19-b1e8-7f93a7719a71" containerName="console" probeResult="failure" output="Get \"https://10.217.0.31:8443/health\": dial tcp 10.217.0.31:8443: connect: connection refused" Dec 10 12:58:36 crc kubenswrapper[4921]: I1210 12:58:36.938526 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-7smdt" Dec 10 12:58:37 crc kubenswrapper[4921]: I1210 12:58:37.166497 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 10 12:58:37 crc kubenswrapper[4921]: I1210 12:58:37.166505 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"f294181c-db48-4209-92a9-73d5045dad0f","Type":"ContainerDied","Data":"d41c0550f00ec69af5abd9f3977485b1dfb781130c6f5f99a9fc72c6b4d430a3"} Dec 10 12:58:37 crc kubenswrapper[4921]: I1210 12:58:37.166930 4921 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d41c0550f00ec69af5abd9f3977485b1dfb781130c6f5f99a9fc72c6b4d430a3" Dec 10 12:58:37 crc kubenswrapper[4921]: I1210 12:58:37.195080 4921 generic.go:334] "Generic (PLEG): container finished" podID="b58499bd-bb38-4162-ba45-38d12afe6096" containerID="7c1d37474454bcec49adb1d0426b3750412ec75b4c8d23d13d020ffef0244fc9" exitCode=0 Dec 10 12:58:37 crc kubenswrapper[4921]: I1210 12:58:37.208274 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"b58499bd-bb38-4162-ba45-38d12afe6096","Type":"ContainerDied","Data":"7c1d37474454bcec49adb1d0426b3750412ec75b4c8d23d13d020ffef0244fc9"} Dec 10 12:58:37 crc kubenswrapper[4921]: I1210 12:58:37.578635 4921 patch_prober.go:28] interesting pod/router-default-5444994796-xtvnd container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 10 12:58:37 crc kubenswrapper[4921]: [-]has-synced failed: reason withheld Dec 10 12:58:37 crc kubenswrapper[4921]: [+]process-running ok Dec 10 12:58:37 crc kubenswrapper[4921]: healthz check failed Dec 10 12:58:37 crc kubenswrapper[4921]: I1210 12:58:37.578698 4921 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-xtvnd" podUID="11c4ff79-c760-4e5d-8594-8dd82990dec0" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 10 12:58:38 crc kubenswrapper[4921]: I1210 12:58:38.580720 4921 patch_prober.go:28] interesting pod/router-default-5444994796-xtvnd container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 10 12:58:38 crc kubenswrapper[4921]: [-]has-synced failed: reason withheld Dec 10 12:58:38 crc kubenswrapper[4921]: [+]process-running ok Dec 10 12:58:38 crc kubenswrapper[4921]: healthz check failed Dec 10 12:58:38 crc kubenswrapper[4921]: I1210 12:58:38.581329 4921 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-xtvnd" podUID="11c4ff79-c760-4e5d-8594-8dd82990dec0" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 10 12:58:38 crc kubenswrapper[4921]: I1210 12:58:38.607353 4921 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 10 12:58:38 crc kubenswrapper[4921]: I1210 12:58:38.676001 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b58499bd-bb38-4162-ba45-38d12afe6096-kube-api-access\") pod \"b58499bd-bb38-4162-ba45-38d12afe6096\" (UID: \"b58499bd-bb38-4162-ba45-38d12afe6096\") " Dec 10 12:58:38 crc kubenswrapper[4921]: I1210 12:58:38.676219 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/b58499bd-bb38-4162-ba45-38d12afe6096-kubelet-dir\") pod \"b58499bd-bb38-4162-ba45-38d12afe6096\" (UID: \"b58499bd-bb38-4162-ba45-38d12afe6096\") " Dec 10 12:58:38 crc kubenswrapper[4921]: I1210 12:58:38.676292 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b58499bd-bb38-4162-ba45-38d12afe6096-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "b58499bd-bb38-4162-ba45-38d12afe6096" (UID: "b58499bd-bb38-4162-ba45-38d12afe6096"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 12:58:38 crc kubenswrapper[4921]: I1210 12:58:38.677164 4921 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/b58499bd-bb38-4162-ba45-38d12afe6096-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 10 12:58:38 crc kubenswrapper[4921]: I1210 12:58:38.681560 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b58499bd-bb38-4162-ba45-38d12afe6096-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "b58499bd-bb38-4162-ba45-38d12afe6096" (UID: "b58499bd-bb38-4162-ba45-38d12afe6096"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 12:58:38 crc kubenswrapper[4921]: I1210 12:58:38.779242 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b58499bd-bb38-4162-ba45-38d12afe6096-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 10 12:58:39 crc kubenswrapper[4921]: I1210 12:58:39.254784 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"b58499bd-bb38-4162-ba45-38d12afe6096","Type":"ContainerDied","Data":"e10ef9597a78a637c43c8f83c0dab3a60eea6ac46bcd709b4a0bed18eb18dd2e"} Dec 10 12:58:39 crc kubenswrapper[4921]: I1210 12:58:39.254827 4921 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e10ef9597a78a637c43c8f83c0dab3a60eea6ac46bcd709b4a0bed18eb18dd2e" Dec 10 12:58:39 crc kubenswrapper[4921]: I1210 12:58:39.254915 4921 util.go:48] "No ready sandbox for pod can be found. 
Dec 10 12:58:39 crc kubenswrapper[4921]: I1210 12:58:39.580691 4921 patch_prober.go:28] interesting pod/router-default-5444994796-xtvnd container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 10 12:58:39 crc kubenswrapper[4921]: [-]has-synced failed: reason withheld
Dec 10 12:58:39 crc kubenswrapper[4921]: [+]process-running ok
Dec 10 12:58:39 crc kubenswrapper[4921]: healthz check failed
Dec 10 12:58:39 crc kubenswrapper[4921]: I1210 12:58:39.580751 4921 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-xtvnd" podUID="11c4ff79-c760-4e5d-8594-8dd82990dec0" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 10 12:58:40 crc kubenswrapper[4921]: I1210 12:58:40.579938 4921 patch_prober.go:28] interesting pod/router-default-5444994796-xtvnd container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 10 12:58:40 crc kubenswrapper[4921]: [-]has-synced failed: reason withheld
Dec 10 12:58:40 crc kubenswrapper[4921]: [+]process-running ok
Dec 10 12:58:40 crc kubenswrapper[4921]: healthz check failed
Dec 10 12:58:40 crc kubenswrapper[4921]: I1210 12:58:40.580218 4921 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-xtvnd" podUID="11c4ff79-c760-4e5d-8594-8dd82990dec0" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 10 12:58:41 crc kubenswrapper[4921]: I1210 12:58:41.581728 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-xtvnd"
Dec 10 12:58:41 crc kubenswrapper[4921]: I1210 12:58:41.585135 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-xtvnd"
Dec 10 12:58:42 crc kubenswrapper[4921]: I1210 12:58:42.716991 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-m7n89"
Dec 10 12:58:46 crc kubenswrapper[4921]: I1210 12:58:46.746563 4921 patch_prober.go:28] interesting pod/console-f9d7485db-8pb7c container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.31:8443/health\": dial tcp 10.217.0.31:8443: connect: connection refused" start-of-body=
Dec 10 12:58:46 crc kubenswrapper[4921]: I1210 12:58:46.747170 4921 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-8pb7c" podUID="ad2115a5-1371-4a19-b1e8-7f93a7719a71" containerName="console" probeResult="failure" output="Get \"https://10.217.0.31:8443/health\": dial tcp 10.217.0.31:8443: connect: connection refused"
Dec 10 12:58:49 crc kubenswrapper[4921]: I1210 12:58:49.630729 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-556st"
Dec 10 12:58:56 crc kubenswrapper[4921]: I1210 12:58:56.753378 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-8pb7c"
Dec 10 12:58:56 crc kubenswrapper[4921]: I1210 12:58:56.763284 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-8pb7c"
Dec 10 12:58:56 crc kubenswrapper[4921]: I1210 12:58:56.855876 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-kwm5b"
Dec 10 12:59:07 crc kubenswrapper[4921]: E1210 12:59:07.228846 4921 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18"
Dec 10 12:59:07 crc kubenswrapper[4921]: E1210 12:59:07.229767 4921 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-p56tc,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-gq8pd_openshift-marketplace(3188aa17-7df1-4bc3-a929-7e5888cb32c8): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Dec 10 12:59:07 crc kubenswrapper[4921]: E1210 12:59:07.230934 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-gq8pd" podUID="3188aa17-7df1-4bc3-a929-7e5888cb32c8"
Dec 10 12:59:07 crc kubenswrapper[4921]: E1210 12:59:07.248511 4921 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18"
Dec 10 12:59:07 crc kubenswrapper[4921]: E1210 12:59:07.248655 4921 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-knksg,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-wblqq_openshift-marketplace(2629ed61-02c7-450a-9d32-d3277755229f): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Dec 10 12:59:07 crc kubenswrapper[4921]: E1210 12:59:07.249860 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-wblqq" podUID="2629ed61-02c7-450a-9d32-d3277755229f"
Dec 10 12:59:08 crc kubenswrapper[4921]: I1210 12:59:08.413902 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"]
Dec 10 12:59:08 crc kubenswrapper[4921]: E1210 12:59:08.414193 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f294181c-db48-4209-92a9-73d5045dad0f" containerName="pruner"
Dec 10 12:59:08 crc kubenswrapper[4921]: I1210 12:59:08.414205 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="f294181c-db48-4209-92a9-73d5045dad0f" containerName="pruner"
Dec 10 12:59:08 crc kubenswrapper[4921]: E1210 12:59:08.414218 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b58499bd-bb38-4162-ba45-38d12afe6096" containerName="pruner"
Dec 10 12:59:08 crc kubenswrapper[4921]: I1210 12:59:08.414224 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="b58499bd-bb38-4162-ba45-38d12afe6096" containerName="pruner"
Dec 10 12:59:08 crc kubenswrapper[4921]: I1210 12:59:08.414327 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="f294181c-db48-4209-92a9-73d5045dad0f" containerName="pruner"
Dec 10 12:59:08 crc kubenswrapper[4921]: I1210 12:59:08.414354 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="b58499bd-bb38-4162-ba45-38d12afe6096" containerName="pruner"
Dec 10 12:59:08 crc kubenswrapper[4921]: I1210 12:59:08.417619 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc"
Dec 10 12:59:08 crc kubenswrapper[4921]: I1210 12:59:08.418794 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"]
Dec 10 12:59:08 crc kubenswrapper[4921]: I1210 12:59:08.420902 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n"
Dec 10 12:59:08 crc kubenswrapper[4921]: I1210 12:59:08.421048 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt"
Dec 10 12:59:08 crc kubenswrapper[4921]: I1210 12:59:08.537536 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2e594b61-36df-4504-98be-6ee7f0ba3cb6-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"2e594b61-36df-4504-98be-6ee7f0ba3cb6\") " pod="openshift-kube-apiserver/revision-pruner-9-crc"
Dec 10 12:59:08 crc kubenswrapper[4921]: I1210 12:59:08.538359 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2e594b61-36df-4504-98be-6ee7f0ba3cb6-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"2e594b61-36df-4504-98be-6ee7f0ba3cb6\") " pod="openshift-kube-apiserver/revision-pruner-9-crc"
Dec 10 12:59:08 crc kubenswrapper[4921]: I1210 12:59:08.639668 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2e594b61-36df-4504-98be-6ee7f0ba3cb6-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"2e594b61-36df-4504-98be-6ee7f0ba3cb6\") " pod="openshift-kube-apiserver/revision-pruner-9-crc"
Dec 10 12:59:08 crc kubenswrapper[4921]: I1210 12:59:08.640013 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2e594b61-36df-4504-98be-6ee7f0ba3cb6-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"2e594b61-36df-4504-98be-6ee7f0ba3cb6\") " pod="openshift-kube-apiserver/revision-pruner-9-crc"
Dec 10 12:59:08 crc kubenswrapper[4921]: I1210 12:59:08.639970 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2e594b61-36df-4504-98be-6ee7f0ba3cb6-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"2e594b61-36df-4504-98be-6ee7f0ba3cb6\") " pod="openshift-kube-apiserver/revision-pruner-9-crc"
Dec 10 12:59:08 crc kubenswrapper[4921]: I1210 12:59:08.664661 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2e594b61-36df-4504-98be-6ee7f0ba3cb6-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"2e594b61-36df-4504-98be-6ee7f0ba3cb6\") " pod="openshift-kube-apiserver/revision-pruner-9-crc"
Dec 10 12:59:08 crc kubenswrapper[4921]: I1210 12:59:08.746330 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc"
Dec 10 12:59:10 crc kubenswrapper[4921]: I1210 12:59:10.162194 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 10 12:59:10 crc kubenswrapper[4921]: I1210 12:59:10.163333 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 10 12:59:10 crc kubenswrapper[4921]: I1210 12:59:10.167829 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin"
Dec 10 12:59:10 crc kubenswrapper[4921]: I1210 12:59:10.168149 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert"
Dec 10 12:59:10 crc kubenswrapper[4921]: I1210 12:59:10.175234 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 10 12:59:10 crc kubenswrapper[4921]: I1210 12:59:10.180330 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 10 12:59:10 crc kubenswrapper[4921]: I1210 12:59:10.265447 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 10 12:59:10 crc kubenswrapper[4921]: I1210 12:59:10.265514 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 10 12:59:10 crc kubenswrapper[4921]: I1210 12:59:10.268668 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt"
Dec 10 12:59:10 crc kubenswrapper[4921]: I1210 12:59:10.276977 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt"
Dec 10 12:59:10 crc kubenswrapper[4921]: I1210 12:59:10.289987 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 10 12:59:10 crc kubenswrapper[4921]: I1210 12:59:10.290006 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 10 12:59:10 crc kubenswrapper[4921]: I1210 12:59:10.304458 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 10 12:59:10 crc kubenswrapper[4921]: I1210 12:59:10.557819 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 10 12:59:10 crc kubenswrapper[4921]: I1210 12:59:10.582445 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 10 12:59:10 crc kubenswrapper[4921]: E1210 12:59:10.908008 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-gq8pd" podUID="3188aa17-7df1-4bc3-a929-7e5888cb32c8"
Dec 10 12:59:10 crc kubenswrapper[4921]: E1210 12:59:10.908516 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-wblqq" podUID="2629ed61-02c7-450a-9d32-d3277755229f"
Dec 10 12:59:11 crc kubenswrapper[4921]: E1210 12:59:11.227814 4921 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18"
Dec 10 12:59:11 crc kubenswrapper[4921]: E1210 12:59:11.227960 4921 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-rb9lm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-4gf95_openshift-marketplace(a9ab7ff6-04d8-45b5-93ba-12db1abe7091): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Dec 10 12:59:11 crc kubenswrapper[4921]: E1210 12:59:11.229177 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-4gf95" podUID="a9ab7ff6-04d8-45b5-93ba-12db1abe7091"
Dec 10 12:59:12 crc kubenswrapper[4921]: E1210 12:59:12.569373 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-4gf95" podUID="a9ab7ff6-04d8-45b5-93ba-12db1abe7091"
Dec 10 12:59:12 crc kubenswrapper[4921]: E1210 12:59:12.642044 4921 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18"
Dec 10 12:59:12 crc kubenswrapper[4921]: E1210 12:59:12.642293 4921 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-4r7pt,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-54ksn_openshift-marketplace(13ae054d-506c-4469-baaa-c8cf6347b2f1): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Dec 10 12:59:12 crc kubenswrapper[4921]: E1210 12:59:12.643460 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-54ksn" podUID="13ae054d-506c-4469-baaa-c8cf6347b2f1"
Dec 10 12:59:12 crc kubenswrapper[4921]: E1210 12:59:12.658432 4921 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18"
Dec 10 12:59:12 crc kubenswrapper[4921]: E1210 12:59:12.658608 4921 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-g5qqj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-fz9kj_openshift-marketplace(2410954d-1bc9-4174-9639-5717425cff64): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-g5qqj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-fz9kj_openshift-marketplace(2410954d-1bc9-4174-9639-5717425cff64): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 10 12:59:12 crc kubenswrapper[4921]: E1210 12:59:12.659823 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-fz9kj" podUID="2410954d-1bc9-4174-9639-5717425cff64" Dec 10 12:59:12 crc kubenswrapper[4921]: E1210 12:59:12.675325 4921 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Dec 10 12:59:12 crc kubenswrapper[4921]: E1210 12:59:12.675752 4921 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-mzslv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-tjjqr_openshift-marketplace(18c18bb4-4d43-4f8e-aa0d-808598954883): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 10 12:59:12 crc kubenswrapper[4921]: E1210 12:59:12.676938 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-tjjqr" podUID="18c18bb4-4d43-4f8e-aa0d-808598954883" Dec 10 12:59:12 crc kubenswrapper[4921]: I1210 12:59:12.804872 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 10 12:59:12 crc kubenswrapper[4921]: I1210 12:59:12.805954 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 10 12:59:12 crc kubenswrapper[4921]: I1210 12:59:12.812273 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 10 12:59:12 crc kubenswrapper[4921]: I1210 12:59:12.906582 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f8e5f473-25ff-49f8-8c92-3201eaefae48-kubelet-dir\") pod \"installer-9-crc\" (UID: \"f8e5f473-25ff-49f8-8c92-3201eaefae48\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 10 12:59:12 crc kubenswrapper[4921]: I1210 12:59:12.906697 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f8e5f473-25ff-49f8-8c92-3201eaefae48-kube-api-access\") pod \"installer-9-crc\" (UID: \"f8e5f473-25ff-49f8-8c92-3201eaefae48\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 10 12:59:12 crc kubenswrapper[4921]: I1210 12:59:12.906729 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f8e5f473-25ff-49f8-8c92-3201eaefae48-var-lock\") pod \"installer-9-crc\" (UID: \"f8e5f473-25ff-49f8-8c92-3201eaefae48\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 10 12:59:13 crc kubenswrapper[4921]: I1210 12:59:13.013804 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f8e5f473-25ff-49f8-8c92-3201eaefae48-kube-api-access\") pod \"installer-9-crc\" (UID: \"f8e5f473-25ff-49f8-8c92-3201eaefae48\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 10 12:59:13 crc kubenswrapper[4921]: I1210 12:59:13.013891 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f8e5f473-25ff-49f8-8c92-3201eaefae48-var-lock\") pod \"installer-9-crc\" (UID: \"f8e5f473-25ff-49f8-8c92-3201eaefae48\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 10 12:59:13 crc kubenswrapper[4921]: I1210 12:59:13.014043 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f8e5f473-25ff-49f8-8c92-3201eaefae48-kubelet-dir\") pod \"installer-9-crc\" (UID: \"f8e5f473-25ff-49f8-8c92-3201eaefae48\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 10 12:59:13 crc kubenswrapper[4921]: I1210 12:59:13.014197 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f8e5f473-25ff-49f8-8c92-3201eaefae48-kubelet-dir\") pod \"installer-9-crc\" (UID: \"f8e5f473-25ff-49f8-8c92-3201eaefae48\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 10 12:59:13 crc kubenswrapper[4921]: I1210 12:59:13.015662 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f8e5f473-25ff-49f8-8c92-3201eaefae48-var-lock\") pod \"installer-9-crc\" (UID: \"f8e5f473-25ff-49f8-8c92-3201eaefae48\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 10 12:59:13 crc kubenswrapper[4921]: I1210 12:59:13.047551 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f8e5f473-25ff-49f8-8c92-3201eaefae48-kube-api-access\") pod \"installer-9-crc\" (UID: 
\"f8e5f473-25ff-49f8-8c92-3201eaefae48\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 10 12:59:13 crc kubenswrapper[4921]: I1210 12:59:13.131898 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 10 12:59:14 crc kubenswrapper[4921]: E1210 12:59:14.429111 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-54ksn" podUID="13ae054d-506c-4469-baaa-c8cf6347b2f1" Dec 10 12:59:14 crc kubenswrapper[4921]: E1210 12:59:14.429586 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-tjjqr" podUID="18c18bb4-4d43-4f8e-aa0d-808598954883" Dec 10 12:59:14 crc kubenswrapper[4921]: E1210 12:59:14.429650 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-fz9kj" podUID="2410954d-1bc9-4174-9639-5717425cff64" Dec 10 12:59:14 crc kubenswrapper[4921]: E1210 12:59:14.501526 4921 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Dec 10 12:59:14 crc kubenswrapper[4921]: E1210 12:59:14.501683 4921 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-m7kjl,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-prv2t_openshift-marketplace(013be189-8bbe-40ef-af47-51c8fd79aa1a): ErrImagePull: rpc error: code = 
Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Dec 10 12:59:14 crc kubenswrapper[4921]: E1210 12:59:14.504547 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-prv2t" podUID="013be189-8bbe-40ef-af47-51c8fd79aa1a"
Dec 10 12:59:14 crc kubenswrapper[4921]: E1210 12:59:14.525547 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-prv2t" podUID="013be189-8bbe-40ef-af47-51c8fd79aa1a"
Dec 10 12:59:14 crc kubenswrapper[4921]: E1210 12:59:14.588830 4921 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18"
Dec 10 12:59:14 crc kubenswrapper[4921]: E1210 12:59:14.588967 4921 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-w2m2v,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-d7btl_openshift-marketplace(dc9190a3-c02a-48f7-ab9f-8be8951f3f37): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Dec 10 12:59:14 crc kubenswrapper[4921]: E1210 12:59:14.593324 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-d7btl" podUID="dc9190a3-c02a-48f7-ab9f-8be8951f3f37"
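The ErrImagePull / ImagePullBackOff pairs above show the kubelet's retry behavior for failed pulls: each failed "PullImage from image service" re-enters the pod sync loop, and the next attempt is delayed by a per-image backoff that doubles on every failure. A minimal Go sketch of that doubling, assuming the commonly cited kubelet defaults (10s initial delay, 300s cap; the exact constants live in the kubelet image manager and can vary by version):

    package main

    import (
        "fmt"
        "time"
    )

    // pullDelay sketches the doubling backoff behind the
    // "Back-off pulling image ..." entries above. The 10s initial
    // step and 300s cap mirror kubelet's documented defaults; this
    // is an illustration, not kubelet's actual implementation.
    func pullDelay(failures int) time.Duration {
        const (
            initial = 10 * time.Second
            max     = 300 * time.Second
        )
        d := initial
        for i := 0; i < failures; i++ {
            d *= 2
            if d > max {
                return max
            }
        }
        return d
    }

    func main() {
        for n := 0; n <= 6; n++ {
            fmt.Printf("failure %d -> next retry in %s\n", n, pullDelay(n))
        }
    }

This is why the marketplace pods below sit in "Error syncing pod, skipping" for roughly a minute before their pulls finally succeed around 12:59:29-12:59:36.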
Dec 10 12:59:14 crc kubenswrapper[4921]: W1210 12:59:14.978909 4921 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod2e594b61_36df_4504_98be_6ee7f0ba3cb6.slice/crio-e835d8e5734ab2a45889da56fd1c493896ab0756056a78eb0da4499994130d55 WatchSource:0}: Error finding container e835d8e5734ab2a45889da56fd1c493896ab0756056a78eb0da4499994130d55: Status 404 returned error can't find the container with id e835d8e5734ab2a45889da56fd1c493896ab0756056a78eb0da4499994130d55
Dec 10 12:59:14 crc kubenswrapper[4921]: W1210 12:59:14.982055 4921 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5fe485a1_e14f_4c09_b5b9_f252bc42b7e8.slice/crio-8e1aec06f04b8e6f774039c2c6c1e726a9b21315b075786d9ea74ce5811c8079 WatchSource:0}: Error finding container 8e1aec06f04b8e6f774039c2c6c1e726a9b21315b075786d9ea74ce5811c8079: Status 404 returned error can't find the container with id 8e1aec06f04b8e6f774039c2c6c1e726a9b21315b075786d9ea74ce5811c8079
Dec 10 12:59:14 crc kubenswrapper[4921]: I1210 12:59:14.985350 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"]
Dec 10 12:59:14 crc kubenswrapper[4921]: I1210 12:59:14.989249 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"]
Dec 10 12:59:15 crc kubenswrapper[4921]: W1210 12:59:15.003128 4921 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-podf8e5f473_25ff_49f8_8c92_3201eaefae48.slice/crio-c78dd785a2e7a210720cd383fa9eb4536badbea4e7b73a4296757e5bae87e9fc WatchSource:0}: Error finding container c78dd785a2e7a210720cd383fa9eb4536badbea4e7b73a4296757e5bae87e9fc: Status 404 returned error can't find the container with id c78dd785a2e7a210720cd383fa9eb4536badbea4e7b73a4296757e5bae87e9fc
Dec 10 12:59:15 crc kubenswrapper[4921]: W1210 12:59:15.146300 4921 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9d751cbb_f2e2_430d_9754_c882a5e924a5.slice/crio-915e4d7695af92587f6c7620ba6a0c5465d31d43608512f49e70de4929e612b0 WatchSource:0}: Error finding container 915e4d7695af92587f6c7620ba6a0c5465d31d43608512f49e70de4929e612b0: Status 404 returned error can't find the container with id 915e4d7695af92587f6c7620ba6a0c5465d31d43608512f49e70de4929e612b0
Dec 10 12:59:15 crc kubenswrapper[4921]: I1210 12:59:15.531286 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"1ed38dd39718ba8a46471b6113c4b4b0b8417c16f5c245f1795aba42e6e6c5bd"}
Dec 10 12:59:15 crc kubenswrapper[4921]: I1210 12:59:15.531338 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"3cb41e9da23da3f58f910459088bc63b5552cb95c5240a8194ecdf718923f607"}
Dec 10 12:59:15 crc kubenswrapper[4921]: I1210 12:59:15.532275 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 10 12:59:15 crc kubenswrapper[4921]: I1210 12:59:15.535077 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc"
event={"ID":"2e594b61-36df-4504-98be-6ee7f0ba3cb6","Type":"ContainerStarted","Data":"61cf5de144de46f18f23a505a7451a264519902c271f9955ae614ad469bfaec6"} Dec 10 12:59:15 crc kubenswrapper[4921]: I1210 12:59:15.535110 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"2e594b61-36df-4504-98be-6ee7f0ba3cb6","Type":"ContainerStarted","Data":"e835d8e5734ab2a45889da56fd1c493896ab0756056a78eb0da4499994130d55"} Dec 10 12:59:15 crc kubenswrapper[4921]: I1210 12:59:15.537459 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"4f8cee003a8a6893c3df47d1bec3291a6e6252b1b932d0539e8dbc657f85a318"} Dec 10 12:59:15 crc kubenswrapper[4921]: I1210 12:59:15.537492 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"8e1aec06f04b8e6f774039c2c6c1e726a9b21315b075786d9ea74ce5811c8079"} Dec 10 12:59:15 crc kubenswrapper[4921]: I1210 12:59:15.547216 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"f8e5f473-25ff-49f8-8c92-3201eaefae48","Type":"ContainerStarted","Data":"fa90c0921d55c4769bd31234e74cf7c2bd91fc85e6045f0b881bdb66e0f671bb"} Dec 10 12:59:15 crc kubenswrapper[4921]: I1210 12:59:15.547251 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"f8e5f473-25ff-49f8-8c92-3201eaefae48","Type":"ContainerStarted","Data":"c78dd785a2e7a210720cd383fa9eb4536badbea4e7b73a4296757e5bae87e9fc"} Dec 10 12:59:15 crc kubenswrapper[4921]: I1210 12:59:15.548791 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"73275a9a0a090dcac37252d50e91d2aae532c7d7f0dd9fef378b184df33800cd"} Dec 10 12:59:15 crc kubenswrapper[4921]: I1210 12:59:15.548828 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"915e4d7695af92587f6c7620ba6a0c5465d31d43608512f49e70de4929e612b0"} Dec 10 12:59:15 crc kubenswrapper[4921]: E1210 12:59:15.550052 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-d7btl" podUID="dc9190a3-c02a-48f7-ab9f-8be8951f3f37" Dec 10 12:59:15 crc kubenswrapper[4921]: I1210 12:59:15.593596 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/installer-9-crc" podStartSLOduration=3.593575163 podStartE2EDuration="3.593575163s" podCreationTimestamp="2025-12-10 12:59:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 12:59:15.591221267 +0000 UTC m=+152.807443231" watchObservedRunningTime="2025-12-10 12:59:15.593575163 +0000 UTC m=+152.809797087" Dec 10 12:59:15 crc kubenswrapper[4921]: I1210 12:59:15.644434 4921 pod_startup_latency_tracker.go:104] 
"Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-9-crc" podStartSLOduration=7.644417098 podStartE2EDuration="7.644417098s" podCreationTimestamp="2025-12-10 12:59:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 12:59:15.6273566 +0000 UTC m=+152.843578524" watchObservedRunningTime="2025-12-10 12:59:15.644417098 +0000 UTC m=+152.860639032" Dec 10 12:59:16 crc kubenswrapper[4921]: I1210 12:59:16.711768 4921 patch_prober.go:28] interesting pod/machine-config-daemon-vn2n6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 10 12:59:16 crc kubenswrapper[4921]: I1210 12:59:16.712353 4921 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" podUID="354355f7-6630-49a8-bdc5-5e875feecb7f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 10 12:59:17 crc kubenswrapper[4921]: I1210 12:59:17.571810 4921 generic.go:334] "Generic (PLEG): container finished" podID="2e594b61-36df-4504-98be-6ee7f0ba3cb6" containerID="61cf5de144de46f18f23a505a7451a264519902c271f9955ae614ad469bfaec6" exitCode=0 Dec 10 12:59:17 crc kubenswrapper[4921]: I1210 12:59:17.571874 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"2e594b61-36df-4504-98be-6ee7f0ba3cb6","Type":"ContainerDied","Data":"61cf5de144de46f18f23a505a7451a264519902c271f9955ae614ad469bfaec6"} Dec 10 12:59:18 crc kubenswrapper[4921]: I1210 12:59:18.890829 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 10 12:59:18 crc kubenswrapper[4921]: I1210 12:59:18.946975 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2e594b61-36df-4504-98be-6ee7f0ba3cb6-kube-api-access\") pod \"2e594b61-36df-4504-98be-6ee7f0ba3cb6\" (UID: \"2e594b61-36df-4504-98be-6ee7f0ba3cb6\") " Dec 10 12:59:18 crc kubenswrapper[4921]: I1210 12:59:18.947182 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2e594b61-36df-4504-98be-6ee7f0ba3cb6-kubelet-dir\") pod \"2e594b61-36df-4504-98be-6ee7f0ba3cb6\" (UID: \"2e594b61-36df-4504-98be-6ee7f0ba3cb6\") " Dec 10 12:59:18 crc kubenswrapper[4921]: I1210 12:59:18.947293 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2e594b61-36df-4504-98be-6ee7f0ba3cb6-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "2e594b61-36df-4504-98be-6ee7f0ba3cb6" (UID: "2e594b61-36df-4504-98be-6ee7f0ba3cb6"). InnerVolumeSpecName "kubelet-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 12:59:18 crc kubenswrapper[4921]: I1210 12:59:18.947463 4921 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2e594b61-36df-4504-98be-6ee7f0ba3cb6-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 10 12:59:18 crc kubenswrapper[4921]: I1210 12:59:18.952664 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2e594b61-36df-4504-98be-6ee7f0ba3cb6-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "2e594b61-36df-4504-98be-6ee7f0ba3cb6" (UID: "2e594b61-36df-4504-98be-6ee7f0ba3cb6"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 12:59:19 crc kubenswrapper[4921]: I1210 12:59:19.049267 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2e594b61-36df-4504-98be-6ee7f0ba3cb6-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 10 12:59:19 crc kubenswrapper[4921]: I1210 12:59:19.583372 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"2e594b61-36df-4504-98be-6ee7f0ba3cb6","Type":"ContainerDied","Data":"e835d8e5734ab2a45889da56fd1c493896ab0756056a78eb0da4499994130d55"} Dec 10 12:59:19 crc kubenswrapper[4921]: I1210 12:59:19.583430 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 10 12:59:19 crc kubenswrapper[4921]: I1210 12:59:19.583434 4921 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e835d8e5734ab2a45889da56fd1c493896ab0756056a78eb0da4499994130d55" Dec 10 12:59:24 crc kubenswrapper[4921]: I1210 12:59:24.608017 4921 generic.go:334] "Generic (PLEG): container finished" podID="2629ed61-02c7-450a-9d32-d3277755229f" containerID="a61cdcfafa10428011a11a0aa21471f31a7f0a8f8f7fb2defd896ec34886d546" exitCode=0 Dec 10 12:59:24 crc kubenswrapper[4921]: I1210 12:59:24.608114 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wblqq" event={"ID":"2629ed61-02c7-450a-9d32-d3277755229f","Type":"ContainerDied","Data":"a61cdcfafa10428011a11a0aa21471f31a7f0a8f8f7fb2defd896ec34886d546"} Dec 10 12:59:33 crc kubenswrapper[4921]: I1210 12:59:33.674854 4921 generic.go:334] "Generic (PLEG): container finished" podID="18c18bb4-4d43-4f8e-aa0d-808598954883" containerID="418db52cf692eb7a1132e138aa187cc82f7085b116022a2156af738620eca3a9" exitCode=0 Dec 10 12:59:33 crc kubenswrapper[4921]: I1210 12:59:33.674976 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tjjqr" event={"ID":"18c18bb4-4d43-4f8e-aa0d-808598954883","Type":"ContainerDied","Data":"418db52cf692eb7a1132e138aa187cc82f7085b116022a2156af738620eca3a9"} Dec 10 12:59:33 crc kubenswrapper[4921]: I1210 12:59:33.679471 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4gf95" event={"ID":"a9ab7ff6-04d8-45b5-93ba-12db1abe7091","Type":"ContainerStarted","Data":"a494a725f061f95bff6eeb83acb4a50ee3d83fb8a901b7b80cb657662a58a5fa"} Dec 10 12:59:33 crc kubenswrapper[4921]: I1210 12:59:33.683117 4921 generic.go:334] "Generic (PLEG): container finished" podID="13ae054d-506c-4469-baaa-c8cf6347b2f1" containerID="1233ddcd8bcd9fe1f9cbfb3f838f219cdeac1a2b7ee6c97958983fe429bccdb6" exitCode=0 Dec 10 12:59:33 crc kubenswrapper[4921]: I1210 
12:59:33.683174 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-54ksn" event={"ID":"13ae054d-506c-4469-baaa-c8cf6347b2f1","Type":"ContainerDied","Data":"1233ddcd8bcd9fe1f9cbfb3f838f219cdeac1a2b7ee6c97958983fe429bccdb6"} Dec 10 12:59:33 crc kubenswrapper[4921]: I1210 12:59:33.687315 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gq8pd" event={"ID":"3188aa17-7df1-4bc3-a929-7e5888cb32c8","Type":"ContainerStarted","Data":"d29dc45badb24b08a288c64a158855cce5c927405f82bd1e8d118a63b311f2ee"} Dec 10 12:59:33 crc kubenswrapper[4921]: I1210 12:59:33.693640 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wblqq" event={"ID":"2629ed61-02c7-450a-9d32-d3277755229f","Type":"ContainerStarted","Data":"f922c4b2d1473c624529d8a43f53aca3846f71648e0e5a3ac05779b68a813182"} Dec 10 12:59:33 crc kubenswrapper[4921]: I1210 12:59:33.793724 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-wblqq" podStartSLOduration=8.891163467 podStartE2EDuration="1m9.793700219s" podCreationTimestamp="2025-12-10 12:58:24 +0000 UTC" firstStartedPulling="2025-12-10 12:58:28.524578947 +0000 UTC m=+105.740800871" lastFinishedPulling="2025-12-10 12:59:29.427115699 +0000 UTC m=+166.643337623" observedRunningTime="2025-12-10 12:59:33.762864455 +0000 UTC m=+170.979086379" watchObservedRunningTime="2025-12-10 12:59:33.793700219 +0000 UTC m=+171.009922143" Dec 10 12:59:34 crc kubenswrapper[4921]: I1210 12:59:34.703211 4921 generic.go:334] "Generic (PLEG): container finished" podID="3188aa17-7df1-4bc3-a929-7e5888cb32c8" containerID="d29dc45badb24b08a288c64a158855cce5c927405f82bd1e8d118a63b311f2ee" exitCode=0 Dec 10 12:59:34 crc kubenswrapper[4921]: I1210 12:59:34.703301 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gq8pd" event={"ID":"3188aa17-7df1-4bc3-a929-7e5888cb32c8","Type":"ContainerDied","Data":"d29dc45badb24b08a288c64a158855cce5c927405f82bd1e8d118a63b311f2ee"} Dec 10 12:59:34 crc kubenswrapper[4921]: I1210 12:59:34.711051 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tjjqr" event={"ID":"18c18bb4-4d43-4f8e-aa0d-808598954883","Type":"ContainerStarted","Data":"f547e27ca22f766a0d275cb18bb543cd23f5d3afa9fd4182c04e077bdf684465"} Dec 10 12:59:34 crc kubenswrapper[4921]: I1210 12:59:34.717824 4921 generic.go:334] "Generic (PLEG): container finished" podID="a9ab7ff6-04d8-45b5-93ba-12db1abe7091" containerID="a494a725f061f95bff6eeb83acb4a50ee3d83fb8a901b7b80cb657662a58a5fa" exitCode=0 Dec 10 12:59:34 crc kubenswrapper[4921]: I1210 12:59:34.717915 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4gf95" event={"ID":"a9ab7ff6-04d8-45b5-93ba-12db1abe7091","Type":"ContainerDied","Data":"a494a725f061f95bff6eeb83acb4a50ee3d83fb8a901b7b80cb657662a58a5fa"} Dec 10 12:59:34 crc kubenswrapper[4921]: I1210 12:59:34.728117 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fz9kj" event={"ID":"2410954d-1bc9-4174-9639-5717425cff64","Type":"ContainerStarted","Data":"76460874a6208d09ae0556c239aded8c58975c3c788d6ba54b3979bbe5c3e149"} Dec 10 12:59:34 crc kubenswrapper[4921]: I1210 12:59:34.732652 4921 generic.go:334] "Generic (PLEG): container finished" podID="dc9190a3-c02a-48f7-ab9f-8be8951f3f37" 
containerID="4f768ce22e121236e077def4b3bbccdbb217dac33d83261ffb5fd69e6a8d9df6" exitCode=0 Dec 10 12:59:34 crc kubenswrapper[4921]: I1210 12:59:34.732733 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-d7btl" event={"ID":"dc9190a3-c02a-48f7-ab9f-8be8951f3f37","Type":"ContainerDied","Data":"4f768ce22e121236e077def4b3bbccdbb217dac33d83261ffb5fd69e6a8d9df6"} Dec 10 12:59:34 crc kubenswrapper[4921]: I1210 12:59:34.753107 4921 generic.go:334] "Generic (PLEG): container finished" podID="013be189-8bbe-40ef-af47-51c8fd79aa1a" containerID="77de361e4774e867a67bbe3533abcbd1ee2c6dada0f6237695a6713f06328b14" exitCode=0 Dec 10 12:59:34 crc kubenswrapper[4921]: I1210 12:59:34.753165 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-prv2t" event={"ID":"013be189-8bbe-40ef-af47-51c8fd79aa1a","Type":"ContainerDied","Data":"77de361e4774e867a67bbe3533abcbd1ee2c6dada0f6237695a6713f06328b14"} Dec 10 12:59:34 crc kubenswrapper[4921]: I1210 12:59:34.786007 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-tjjqr" podStartSLOduration=3.970499399 podStartE2EDuration="1m9.785968927s" podCreationTimestamp="2025-12-10 12:58:25 +0000 UTC" firstStartedPulling="2025-12-10 12:58:28.573578419 +0000 UTC m=+105.789800343" lastFinishedPulling="2025-12-10 12:59:34.389047947 +0000 UTC m=+171.605269871" observedRunningTime="2025-12-10 12:59:34.781255505 +0000 UTC m=+171.997477419" watchObservedRunningTime="2025-12-10 12:59:34.785968927 +0000 UTC m=+172.002190861" Dec 10 12:59:35 crc kubenswrapper[4921]: I1210 12:59:35.340964 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-wblqq" Dec 10 12:59:35 crc kubenswrapper[4921]: I1210 12:59:35.341363 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-wblqq" Dec 10 12:59:35 crc kubenswrapper[4921]: I1210 12:59:35.428459 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-wblqq" Dec 10 12:59:35 crc kubenswrapper[4921]: I1210 12:59:35.760026 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4gf95" event={"ID":"a9ab7ff6-04d8-45b5-93ba-12db1abe7091","Type":"ContainerStarted","Data":"4b610590e3691ced7b3936648b7da7f594462b00d24f96bcdd4734fafec99c11"} Dec 10 12:59:35 crc kubenswrapper[4921]: I1210 12:59:35.764283 4921 generic.go:334] "Generic (PLEG): container finished" podID="2410954d-1bc9-4174-9639-5717425cff64" containerID="76460874a6208d09ae0556c239aded8c58975c3c788d6ba54b3979bbe5c3e149" exitCode=0 Dec 10 12:59:35 crc kubenswrapper[4921]: I1210 12:59:35.764382 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fz9kj" event={"ID":"2410954d-1bc9-4174-9639-5717425cff64","Type":"ContainerDied","Data":"76460874a6208d09ae0556c239aded8c58975c3c788d6ba54b3979bbe5c3e149"} Dec 10 12:59:35 crc kubenswrapper[4921]: I1210 12:59:35.767502 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-54ksn" event={"ID":"13ae054d-506c-4469-baaa-c8cf6347b2f1","Type":"ContainerStarted","Data":"c386d575c15f23b50c8ed3208d26d77e577fc3bf8f7820934028baebd326be7b"} Dec 10 12:59:35 crc kubenswrapper[4921]: I1210 12:59:35.774883 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/certified-operators-gq8pd" event={"ID":"3188aa17-7df1-4bc3-a929-7e5888cb32c8","Type":"ContainerStarted","Data":"70756b488fd188cd8ad61aea740158053814d07f7727e5f9b0db5b3d16feacd1"} Dec 10 12:59:35 crc kubenswrapper[4921]: I1210 12:59:35.812860 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-4gf95" podStartSLOduration=3.143859345 podStartE2EDuration="1m8.812840365s" podCreationTimestamp="2025-12-10 12:58:27 +0000 UTC" firstStartedPulling="2025-12-10 12:58:29.721494142 +0000 UTC m=+106.937716066" lastFinishedPulling="2025-12-10 12:59:35.390475162 +0000 UTC m=+172.606697086" observedRunningTime="2025-12-10 12:59:35.788875203 +0000 UTC m=+173.005097127" watchObservedRunningTime="2025-12-10 12:59:35.812840365 +0000 UTC m=+173.029062289" Dec 10 12:59:35 crc kubenswrapper[4921]: I1210 12:59:35.814270 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-gq8pd" podStartSLOduration=4.987392488 podStartE2EDuration="1m11.814263095s" podCreationTimestamp="2025-12-10 12:58:24 +0000 UTC" firstStartedPulling="2025-12-10 12:58:28.501881826 +0000 UTC m=+105.718103750" lastFinishedPulling="2025-12-10 12:59:35.328752433 +0000 UTC m=+172.544974357" observedRunningTime="2025-12-10 12:59:35.810309054 +0000 UTC m=+173.026530978" watchObservedRunningTime="2025-12-10 12:59:35.814263095 +0000 UTC m=+173.030485019" Dec 10 12:59:35 crc kubenswrapper[4921]: I1210 12:59:35.830407 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-54ksn" podStartSLOduration=3.904580065 podStartE2EDuration="1m9.830371066s" podCreationTimestamp="2025-12-10 12:58:26 +0000 UTC" firstStartedPulling="2025-12-10 12:58:28.632542668 +0000 UTC m=+105.848764592" lastFinishedPulling="2025-12-10 12:59:34.558333629 +0000 UTC m=+171.774555593" observedRunningTime="2025-12-10 12:59:35.829590014 +0000 UTC m=+173.045811948" watchObservedRunningTime="2025-12-10 12:59:35.830371066 +0000 UTC m=+173.046592990" Dec 10 12:59:36 crc kubenswrapper[4921]: I1210 12:59:36.475519 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-tjjqr" Dec 10 12:59:36 crc kubenswrapper[4921]: I1210 12:59:36.475843 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-tjjqr" Dec 10 12:59:36 crc kubenswrapper[4921]: I1210 12:59:36.527421 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-tjjqr" Dec 10 12:59:36 crc kubenswrapper[4921]: I1210 12:59:36.783230 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fz9kj" event={"ID":"2410954d-1bc9-4174-9639-5717425cff64","Type":"ContainerStarted","Data":"c869aad86158390bf33895a99023b4e9f6dcc38bbade5b8c3b82486d49595451"} Dec 10 12:59:36 crc kubenswrapper[4921]: I1210 12:59:36.786563 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-prv2t" event={"ID":"013be189-8bbe-40ef-af47-51c8fd79aa1a","Type":"ContainerStarted","Data":"eab8a24d40aa141b9fc0f0ac0165e18e9787bdc26511d79daf6d3c304302fbd9"} Dec 10 12:59:36 crc kubenswrapper[4921]: I1210 12:59:36.828233 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-prv2t" podStartSLOduration=5.606626169 
podStartE2EDuration="1m12.828216401s" podCreationTimestamp="2025-12-10 12:58:24 +0000 UTC" firstStartedPulling="2025-12-10 12:58:28.443184125 +0000 UTC m=+105.659406049" lastFinishedPulling="2025-12-10 12:59:35.664774357 +0000 UTC m=+172.880996281" observedRunningTime="2025-12-10 12:59:36.824950199 +0000 UTC m=+174.041172123" watchObservedRunningTime="2025-12-10 12:59:36.828216401 +0000 UTC m=+174.044438335" Dec 10 12:59:36 crc kubenswrapper[4921]: I1210 12:59:36.829432 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-fz9kj" podStartSLOduration=3.9349352939999998 podStartE2EDuration="1m9.829423114s" podCreationTimestamp="2025-12-10 12:58:27 +0000 UTC" firstStartedPulling="2025-12-10 12:58:29.728236369 +0000 UTC m=+106.944458293" lastFinishedPulling="2025-12-10 12:59:35.622724189 +0000 UTC m=+172.838946113" observedRunningTime="2025-12-10 12:59:36.806492392 +0000 UTC m=+174.022714326" watchObservedRunningTime="2025-12-10 12:59:36.829423114 +0000 UTC m=+174.045645038" Dec 10 12:59:36 crc kubenswrapper[4921]: I1210 12:59:36.843911 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-54ksn" Dec 10 12:59:36 crc kubenswrapper[4921]: I1210 12:59:36.843951 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-54ksn" Dec 10 12:59:36 crc kubenswrapper[4921]: I1210 12:59:36.907522 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-54ksn" Dec 10 12:59:37 crc kubenswrapper[4921]: I1210 12:59:37.812877 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-d7btl" event={"ID":"dc9190a3-c02a-48f7-ab9f-8be8951f3f37","Type":"ContainerStarted","Data":"4085a2f870ac0bad809d33b1e1b2d793834cc937698175197320af9fa6ac1723"} Dec 10 12:59:37 crc kubenswrapper[4921]: I1210 12:59:37.834966 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-d7btl" podStartSLOduration=4.731098016 podStartE2EDuration="1m13.834947665s" podCreationTimestamp="2025-12-10 12:58:24 +0000 UTC" firstStartedPulling="2025-12-10 12:58:27.348022198 +0000 UTC m=+104.564244122" lastFinishedPulling="2025-12-10 12:59:36.451871847 +0000 UTC m=+173.668093771" observedRunningTime="2025-12-10 12:59:37.83086137 +0000 UTC m=+175.047083294" watchObservedRunningTime="2025-12-10 12:59:37.834947665 +0000 UTC m=+175.051169589" Dec 10 12:59:37 crc kubenswrapper[4921]: I1210 12:59:37.912676 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-4gf95" Dec 10 12:59:37 crc kubenswrapper[4921]: I1210 12:59:37.913007 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-4gf95" Dec 10 12:59:38 crc kubenswrapper[4921]: I1210 12:59:38.241558 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-fz9kj" Dec 10 12:59:38 crc kubenswrapper[4921]: I1210 12:59:38.241635 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-fz9kj" Dec 10 12:59:38 crc kubenswrapper[4921]: I1210 12:59:38.960935 4921 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-4gf95" podUID="a9ab7ff6-04d8-45b5-93ba-12db1abe7091" 
containerName="registry-server" probeResult="failure" output=< Dec 10 12:59:38 crc kubenswrapper[4921]: timeout: failed to connect service ":50051" within 1s Dec 10 12:59:38 crc kubenswrapper[4921]: > Dec 10 12:59:39 crc kubenswrapper[4921]: I1210 12:59:39.277597 4921 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-fz9kj" podUID="2410954d-1bc9-4174-9639-5717425cff64" containerName="registry-server" probeResult="failure" output=< Dec 10 12:59:39 crc kubenswrapper[4921]: timeout: failed to connect service ":50051" within 1s Dec 10 12:59:39 crc kubenswrapper[4921]: > Dec 10 12:59:44 crc kubenswrapper[4921]: I1210 12:59:44.557432 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-d7btl" Dec 10 12:59:44 crc kubenswrapper[4921]: I1210 12:59:44.558780 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-d7btl" Dec 10 12:59:44 crc kubenswrapper[4921]: I1210 12:59:44.599121 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-gq8pd" Dec 10 12:59:44 crc kubenswrapper[4921]: I1210 12:59:44.599789 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-gq8pd" Dec 10 12:59:44 crc kubenswrapper[4921]: I1210 12:59:44.628773 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-d7btl" Dec 10 12:59:44 crc kubenswrapper[4921]: I1210 12:59:44.650004 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-gq8pd" Dec 10 12:59:44 crc kubenswrapper[4921]: I1210 12:59:44.715965 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-g7bns"] Dec 10 12:59:44 crc kubenswrapper[4921]: I1210 12:59:44.893722 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-d7btl" Dec 10 12:59:44 crc kubenswrapper[4921]: I1210 12:59:44.900127 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-gq8pd" Dec 10 12:59:45 crc kubenswrapper[4921]: I1210 12:59:45.142648 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-prv2t" Dec 10 12:59:45 crc kubenswrapper[4921]: I1210 12:59:45.142703 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-prv2t" Dec 10 12:59:45 crc kubenswrapper[4921]: I1210 12:59:45.202065 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-prv2t" Dec 10 12:59:45 crc kubenswrapper[4921]: I1210 12:59:45.393413 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-wblqq" Dec 10 12:59:45 crc kubenswrapper[4921]: I1210 12:59:45.906577 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-prv2t" Dec 10 12:59:46 crc kubenswrapper[4921]: I1210 12:59:46.524984 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-tjjqr" Dec 10 12:59:46 crc kubenswrapper[4921]: I1210 12:59:46.679304 4921 kubelet.go:2437] 
"SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-prv2t"] Dec 10 12:59:46 crc kubenswrapper[4921]: I1210 12:59:46.711309 4921 patch_prober.go:28] interesting pod/machine-config-daemon-vn2n6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 10 12:59:46 crc kubenswrapper[4921]: I1210 12:59:46.711596 4921 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" podUID="354355f7-6630-49a8-bdc5-5e875feecb7f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 10 12:59:46 crc kubenswrapper[4921]: I1210 12:59:46.886939 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-54ksn" Dec 10 12:59:47 crc kubenswrapper[4921]: I1210 12:59:47.241988 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-gq8pd"] Dec 10 12:59:47 crc kubenswrapper[4921]: I1210 12:59:47.258501 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-wblqq"] Dec 10 12:59:47 crc kubenswrapper[4921]: I1210 12:59:47.258713 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-wblqq" podUID="2629ed61-02c7-450a-9d32-d3277755229f" containerName="registry-server" containerID="cri-o://f922c4b2d1473c624529d8a43f53aca3846f71648e0e5a3ac05779b68a813182" gracePeriod=30 Dec 10 12:59:47 crc kubenswrapper[4921]: I1210 12:59:47.306586 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-d7btl"] Dec 10 12:59:47 crc kubenswrapper[4921]: I1210 12:59:47.308023 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-gwnkp"] Dec 10 12:59:47 crc kubenswrapper[4921]: I1210 12:59:47.308243 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-gwnkp" podUID="9ab094b2-78dc-4ee6-b563-a1ae064588cf" containerName="marketplace-operator" containerID="cri-o://62b5f1b9adb7a7d4bdd0d933c021bf203f10d9a2822e93985363200b7132b134" gracePeriod=30 Dec 10 12:59:47 crc kubenswrapper[4921]: I1210 12:59:47.317892 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-wblqq"] Dec 10 12:59:47 crc kubenswrapper[4921]: I1210 12:59:47.327460 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-54ksn"] Dec 10 12:59:47 crc kubenswrapper[4921]: I1210 12:59:47.333333 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-pwjmn"] Dec 10 12:59:47 crc kubenswrapper[4921]: E1210 12:59:47.333709 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2e594b61-36df-4504-98be-6ee7f0ba3cb6" containerName="pruner" Dec 10 12:59:47 crc kubenswrapper[4921]: I1210 12:59:47.333863 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="2e594b61-36df-4504-98be-6ee7f0ba3cb6" containerName="pruner" Dec 10 12:59:47 crc kubenswrapper[4921]: I1210 12:59:47.334216 4921 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="2e594b61-36df-4504-98be-6ee7f0ba3cb6" containerName="pruner" Dec 10 12:59:47 crc kubenswrapper[4921]: I1210 12:59:47.334809 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-pwjmn" Dec 10 12:59:47 crc kubenswrapper[4921]: I1210 12:59:47.339544 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-tjjqr"] Dec 10 12:59:47 crc kubenswrapper[4921]: I1210 12:59:47.339735 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-tjjqr" podUID="18c18bb4-4d43-4f8e-aa0d-808598954883" containerName="registry-server" containerID="cri-o://f547e27ca22f766a0d275cb18bb543cd23f5d3afa9fd4182c04e077bdf684465" gracePeriod=30 Dec 10 12:59:47 crc kubenswrapper[4921]: I1210 12:59:47.344241 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-4gf95"] Dec 10 12:59:47 crc kubenswrapper[4921]: I1210 12:59:47.344519 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-4gf95" podUID="a9ab7ff6-04d8-45b5-93ba-12db1abe7091" containerName="registry-server" containerID="cri-o://4b610590e3691ced7b3936648b7da7f594462b00d24f96bcdd4734fafec99c11" gracePeriod=30 Dec 10 12:59:47 crc kubenswrapper[4921]: I1210 12:59:47.349806 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-pwjmn"] Dec 10 12:59:47 crc kubenswrapper[4921]: I1210 12:59:47.360951 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-fz9kj"] Dec 10 12:59:47 crc kubenswrapper[4921]: I1210 12:59:47.361186 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-fz9kj" podUID="2410954d-1bc9-4174-9639-5717425cff64" containerName="registry-server" containerID="cri-o://c869aad86158390bf33895a99023b4e9f6dcc38bbade5b8c3b82486d49595451" gracePeriod=30 Dec 10 12:59:47 crc kubenswrapper[4921]: I1210 12:59:47.387006 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/7a4e394f-d2a7-4f32-8da5-a2a221b267ea-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-pwjmn\" (UID: \"7a4e394f-d2a7-4f32-8da5-a2a221b267ea\") " pod="openshift-marketplace/marketplace-operator-79b997595-pwjmn" Dec 10 12:59:47 crc kubenswrapper[4921]: I1210 12:59:47.387077 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7a4e394f-d2a7-4f32-8da5-a2a221b267ea-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-pwjmn\" (UID: \"7a4e394f-d2a7-4f32-8da5-a2a221b267ea\") " pod="openshift-marketplace/marketplace-operator-79b997595-pwjmn" Dec 10 12:59:47 crc kubenswrapper[4921]: I1210 12:59:47.387126 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p8rxq\" (UniqueName: \"kubernetes.io/projected/7a4e394f-d2a7-4f32-8da5-a2a221b267ea-kube-api-access-p8rxq\") pod \"marketplace-operator-79b997595-pwjmn\" (UID: \"7a4e394f-d2a7-4f32-8da5-a2a221b267ea\") " pod="openshift-marketplace/marketplace-operator-79b997595-pwjmn" Dec 10 12:59:47 crc kubenswrapper[4921]: I1210 12:59:47.488167 4921 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-p8rxq\" (UniqueName: \"kubernetes.io/projected/7a4e394f-d2a7-4f32-8da5-a2a221b267ea-kube-api-access-p8rxq\") pod \"marketplace-operator-79b997595-pwjmn\" (UID: \"7a4e394f-d2a7-4f32-8da5-a2a221b267ea\") " pod="openshift-marketplace/marketplace-operator-79b997595-pwjmn" Dec 10 12:59:47 crc kubenswrapper[4921]: I1210 12:59:47.488514 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/7a4e394f-d2a7-4f32-8da5-a2a221b267ea-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-pwjmn\" (UID: \"7a4e394f-d2a7-4f32-8da5-a2a221b267ea\") " pod="openshift-marketplace/marketplace-operator-79b997595-pwjmn" Dec 10 12:59:47 crc kubenswrapper[4921]: I1210 12:59:47.488642 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7a4e394f-d2a7-4f32-8da5-a2a221b267ea-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-pwjmn\" (UID: \"7a4e394f-d2a7-4f32-8da5-a2a221b267ea\") " pod="openshift-marketplace/marketplace-operator-79b997595-pwjmn" Dec 10 12:59:47 crc kubenswrapper[4921]: I1210 12:59:47.489955 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7a4e394f-d2a7-4f32-8da5-a2a221b267ea-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-pwjmn\" (UID: \"7a4e394f-d2a7-4f32-8da5-a2a221b267ea\") " pod="openshift-marketplace/marketplace-operator-79b997595-pwjmn" Dec 10 12:59:47 crc kubenswrapper[4921]: I1210 12:59:47.495332 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/7a4e394f-d2a7-4f32-8da5-a2a221b267ea-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-pwjmn\" (UID: \"7a4e394f-d2a7-4f32-8da5-a2a221b267ea\") " pod="openshift-marketplace/marketplace-operator-79b997595-pwjmn" Dec 10 12:59:47 crc kubenswrapper[4921]: I1210 12:59:47.503980 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p8rxq\" (UniqueName: \"kubernetes.io/projected/7a4e394f-d2a7-4f32-8da5-a2a221b267ea-kube-api-access-p8rxq\") pod \"marketplace-operator-79b997595-pwjmn\" (UID: \"7a4e394f-d2a7-4f32-8da5-a2a221b267ea\") " pod="openshift-marketplace/marketplace-operator-79b997595-pwjmn" Dec 10 12:59:47 crc kubenswrapper[4921]: I1210 12:59:47.661359 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-pwjmn" Dec 10 12:59:47 crc kubenswrapper[4921]: I1210 12:59:47.866466 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-d7btl" podUID="dc9190a3-c02a-48f7-ab9f-8be8951f3f37" containerName="registry-server" containerID="cri-o://4085a2f870ac0bad809d33b1e1b2d793834cc937698175197320af9fa6ac1723" gracePeriod=30 Dec 10 12:59:47 crc kubenswrapper[4921]: I1210 12:59:47.866791 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-prv2t" podUID="013be189-8bbe-40ef-af47-51c8fd79aa1a" containerName="registry-server" containerID="cri-o://eab8a24d40aa141b9fc0f0ac0165e18e9787bdc26511d79daf6d3c304302fbd9" gracePeriod=2 Dec 10 12:59:47 crc kubenswrapper[4921]: I1210 12:59:47.867080 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-gq8pd" podUID="3188aa17-7df1-4bc3-a929-7e5888cb32c8" containerName="registry-server" containerID="cri-o://70756b488fd188cd8ad61aea740158053814d07f7727e5f9b0db5b3d16feacd1" gracePeriod=30 Dec 10 12:59:47 crc kubenswrapper[4921]: I1210 12:59:47.867157 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-54ksn" podUID="13ae054d-506c-4469-baaa-c8cf6347b2f1" containerName="registry-server" containerID="cri-o://c386d575c15f23b50c8ed3208d26d77e577fc3bf8f7820934028baebd326be7b" gracePeriod=30 Dec 10 12:59:49 crc kubenswrapper[4921]: I1210 12:59:49.081773 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-54ksn"] Dec 10 12:59:49 crc kubenswrapper[4921]: I1210 12:59:49.355473 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-pwjmn"] Dec 10 12:59:49 crc kubenswrapper[4921]: W1210 12:59:49.488555 4921 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7a4e394f_d2a7_4f32_8da5_a2a221b267ea.slice/crio-f4e1225d1234585389f7ee4f96e19d1cfbfe4e8104a214bc4bfb4a2d0c182b57 WatchSource:0}: Error finding container f4e1225d1234585389f7ee4f96e19d1cfbfe4e8104a214bc4bfb4a2d0c182b57: Status 404 returned error can't find the container with id f4e1225d1234585389f7ee4f96e19d1cfbfe4e8104a214bc4bfb4a2d0c182b57 Dec 10 12:59:49 crc kubenswrapper[4921]: I1210 12:59:49.665507 4921 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-gwnkp" Dec 10 12:59:49 crc kubenswrapper[4921]: I1210 12:59:49.721594 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/9ab094b2-78dc-4ee6-b563-a1ae064588cf-marketplace-operator-metrics\") pod \"9ab094b2-78dc-4ee6-b563-a1ae064588cf\" (UID: \"9ab094b2-78dc-4ee6-b563-a1ae064588cf\") " Dec 10 12:59:49 crc kubenswrapper[4921]: I1210 12:59:49.721708 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kr26f\" (UniqueName: \"kubernetes.io/projected/9ab094b2-78dc-4ee6-b563-a1ae064588cf-kube-api-access-kr26f\") pod \"9ab094b2-78dc-4ee6-b563-a1ae064588cf\" (UID: \"9ab094b2-78dc-4ee6-b563-a1ae064588cf\") " Dec 10 12:59:49 crc kubenswrapper[4921]: I1210 12:59:49.721749 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9ab094b2-78dc-4ee6-b563-a1ae064588cf-marketplace-trusted-ca\") pod \"9ab094b2-78dc-4ee6-b563-a1ae064588cf\" (UID: \"9ab094b2-78dc-4ee6-b563-a1ae064588cf\") " Dec 10 12:59:49 crc kubenswrapper[4921]: I1210 12:59:49.723578 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9ab094b2-78dc-4ee6-b563-a1ae064588cf-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "9ab094b2-78dc-4ee6-b563-a1ae064588cf" (UID: "9ab094b2-78dc-4ee6-b563-a1ae064588cf"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 12:59:49 crc kubenswrapper[4921]: I1210 12:59:49.728165 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9ab094b2-78dc-4ee6-b563-a1ae064588cf-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "9ab094b2-78dc-4ee6-b563-a1ae064588cf" (UID: "9ab094b2-78dc-4ee6-b563-a1ae064588cf"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 12:59:49 crc kubenswrapper[4921]: I1210 12:59:49.728300 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9ab094b2-78dc-4ee6-b563-a1ae064588cf-kube-api-access-kr26f" (OuterVolumeSpecName: "kube-api-access-kr26f") pod "9ab094b2-78dc-4ee6-b563-a1ae064588cf" (UID: "9ab094b2-78dc-4ee6-b563-a1ae064588cf"). InnerVolumeSpecName "kube-api-access-kr26f". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 12:59:49 crc kubenswrapper[4921]: I1210 12:59:49.823839 4921 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/9ab094b2-78dc-4ee6-b563-a1ae064588cf-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Dec 10 12:59:49 crc kubenswrapper[4921]: I1210 12:59:49.823881 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kr26f\" (UniqueName: \"kubernetes.io/projected/9ab094b2-78dc-4ee6-b563-a1ae064588cf-kube-api-access-kr26f\") on node \"crc\" DevicePath \"\"" Dec 10 12:59:49 crc kubenswrapper[4921]: I1210 12:59:49.823891 4921 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9ab094b2-78dc-4ee6-b563-a1ae064588cf-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 10 12:59:49 crc kubenswrapper[4921]: I1210 12:59:49.879193 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-pwjmn" event={"ID":"7a4e394f-d2a7-4f32-8da5-a2a221b267ea","Type":"ContainerStarted","Data":"f4e1225d1234585389f7ee4f96e19d1cfbfe4e8104a214bc4bfb4a2d0c182b57"} Dec 10 12:59:49 crc kubenswrapper[4921]: I1210 12:59:49.881749 4921 generic.go:334] "Generic (PLEG): container finished" podID="18c18bb4-4d43-4f8e-aa0d-808598954883" containerID="f547e27ca22f766a0d275cb18bb543cd23f5d3afa9fd4182c04e077bdf684465" exitCode=0 Dec 10 12:59:49 crc kubenswrapper[4921]: I1210 12:59:49.881812 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tjjqr" event={"ID":"18c18bb4-4d43-4f8e-aa0d-808598954883","Type":"ContainerDied","Data":"f547e27ca22f766a0d275cb18bb543cd23f5d3afa9fd4182c04e077bdf684465"} Dec 10 12:59:49 crc kubenswrapper[4921]: I1210 12:59:49.883712 4921 generic.go:334] "Generic (PLEG): container finished" podID="a9ab7ff6-04d8-45b5-93ba-12db1abe7091" containerID="4b610590e3691ced7b3936648b7da7f594462b00d24f96bcdd4734fafec99c11" exitCode=0 Dec 10 12:59:49 crc kubenswrapper[4921]: I1210 12:59:49.883749 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4gf95" event={"ID":"a9ab7ff6-04d8-45b5-93ba-12db1abe7091","Type":"ContainerDied","Data":"4b610590e3691ced7b3936648b7da7f594462b00d24f96bcdd4734fafec99c11"} Dec 10 12:59:49 crc kubenswrapper[4921]: I1210 12:59:49.886241 4921 generic.go:334] "Generic (PLEG): container finished" podID="dc9190a3-c02a-48f7-ab9f-8be8951f3f37" containerID="4085a2f870ac0bad809d33b1e1b2d793834cc937698175197320af9fa6ac1723" exitCode=0 Dec 10 12:59:49 crc kubenswrapper[4921]: I1210 12:59:49.886285 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-d7btl" event={"ID":"dc9190a3-c02a-48f7-ab9f-8be8951f3f37","Type":"ContainerDied","Data":"4085a2f870ac0bad809d33b1e1b2d793834cc937698175197320af9fa6ac1723"} Dec 10 12:59:49 crc kubenswrapper[4921]: I1210 12:59:49.887768 4921 generic.go:334] "Generic (PLEG): container finished" podID="13ae054d-506c-4469-baaa-c8cf6347b2f1" containerID="c386d575c15f23b50c8ed3208d26d77e577fc3bf8f7820934028baebd326be7b" exitCode=0 Dec 10 12:59:49 crc kubenswrapper[4921]: I1210 12:59:49.887808 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-54ksn" event={"ID":"13ae054d-506c-4469-baaa-c8cf6347b2f1","Type":"ContainerDied","Data":"c386d575c15f23b50c8ed3208d26d77e577fc3bf8f7820934028baebd326be7b"} 
Dec 10 12:59:49 crc kubenswrapper[4921]: I1210 12:59:49.889198 4921 generic.go:334] "Generic (PLEG): container finished" podID="013be189-8bbe-40ef-af47-51c8fd79aa1a" containerID="eab8a24d40aa141b9fc0f0ac0165e18e9787bdc26511d79daf6d3c304302fbd9" exitCode=0
Dec 10 12:59:49 crc kubenswrapper[4921]: I1210 12:59:49.889239 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-prv2t" event={"ID":"013be189-8bbe-40ef-af47-51c8fd79aa1a","Type":"ContainerDied","Data":"eab8a24d40aa141b9fc0f0ac0165e18e9787bdc26511d79daf6d3c304302fbd9"}
Dec 10 12:59:49 crc kubenswrapper[4921]: I1210 12:59:49.890562 4921 generic.go:334] "Generic (PLEG): container finished" podID="2410954d-1bc9-4174-9639-5717425cff64" containerID="c869aad86158390bf33895a99023b4e9f6dcc38bbade5b8c3b82486d49595451" exitCode=0
Dec 10 12:59:49 crc kubenswrapper[4921]: I1210 12:59:49.890604 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fz9kj" event={"ID":"2410954d-1bc9-4174-9639-5717425cff64","Type":"ContainerDied","Data":"c869aad86158390bf33895a99023b4e9f6dcc38bbade5b8c3b82486d49595451"}
Dec 10 12:59:49 crc kubenswrapper[4921]: I1210 12:59:49.892307 4921 generic.go:334] "Generic (PLEG): container finished" podID="3188aa17-7df1-4bc3-a929-7e5888cb32c8" containerID="70756b488fd188cd8ad61aea740158053814d07f7727e5f9b0db5b3d16feacd1" exitCode=0
Dec 10 12:59:49 crc kubenswrapper[4921]: I1210 12:59:49.892360 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gq8pd" event={"ID":"3188aa17-7df1-4bc3-a929-7e5888cb32c8","Type":"ContainerDied","Data":"70756b488fd188cd8ad61aea740158053814d07f7727e5f9b0db5b3d16feacd1"}
Dec 10 12:59:49 crc kubenswrapper[4921]: I1210 12:59:49.893907 4921 generic.go:334] "Generic (PLEG): container finished" podID="2629ed61-02c7-450a-9d32-d3277755229f" containerID="f922c4b2d1473c624529d8a43f53aca3846f71648e0e5a3ac05779b68a813182" exitCode=0
Dec 10 12:59:49 crc kubenswrapper[4921]: I1210 12:59:49.893950 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wblqq" event={"ID":"2629ed61-02c7-450a-9d32-d3277755229f","Type":"ContainerDied","Data":"f922c4b2d1473c624529d8a43f53aca3846f71648e0e5a3ac05779b68a813182"}
Dec 10 12:59:49 crc kubenswrapper[4921]: I1210 12:59:49.895015 4921 generic.go:334] "Generic (PLEG): container finished" podID="9ab094b2-78dc-4ee6-b563-a1ae064588cf" containerID="62b5f1b9adb7a7d4bdd0d933c021bf203f10d9a2822e93985363200b7132b134" exitCode=0
Dec 10 12:59:49 crc kubenswrapper[4921]: I1210 12:59:49.895039 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-gwnkp" event={"ID":"9ab094b2-78dc-4ee6-b563-a1ae064588cf","Type":"ContainerDied","Data":"62b5f1b9adb7a7d4bdd0d933c021bf203f10d9a2822e93985363200b7132b134"}
Dec 10 12:59:49 crc kubenswrapper[4921]: I1210 12:59:49.895053 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-gwnkp" event={"ID":"9ab094b2-78dc-4ee6-b563-a1ae064588cf","Type":"ContainerDied","Data":"708ceaed6929037dd5a0bf29e1d6c98c53678ce0e91e26d0085f545b17bc8e36"}
Dec 10 12:59:49 crc kubenswrapper[4921]: I1210 12:59:49.895070 4921 scope.go:117] "RemoveContainer" containerID="62b5f1b9adb7a7d4bdd0d933c021bf203f10d9a2822e93985363200b7132b134"
Dec 10 12:59:49 crc kubenswrapper[4921]: I1210 12:59:49.895170 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-gwnkp"
Dec 10 12:59:49 crc kubenswrapper[4921]: I1210 12:59:49.919365 4921 scope.go:117] "RemoveContainer" containerID="62b5f1b9adb7a7d4bdd0d933c021bf203f10d9a2822e93985363200b7132b134"
Dec 10 12:59:49 crc kubenswrapper[4921]: E1210 12:59:49.920604 4921 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"62b5f1b9adb7a7d4bdd0d933c021bf203f10d9a2822e93985363200b7132b134\": container with ID starting with 62b5f1b9adb7a7d4bdd0d933c021bf203f10d9a2822e93985363200b7132b134 not found: ID does not exist" containerID="62b5f1b9adb7a7d4bdd0d933c021bf203f10d9a2822e93985363200b7132b134"
Dec 10 12:59:49 crc kubenswrapper[4921]: I1210 12:59:49.920741 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"62b5f1b9adb7a7d4bdd0d933c021bf203f10d9a2822e93985363200b7132b134"} err="failed to get container status \"62b5f1b9adb7a7d4bdd0d933c021bf203f10d9a2822e93985363200b7132b134\": rpc error: code = NotFound desc = could not find container \"62b5f1b9adb7a7d4bdd0d933c021bf203f10d9a2822e93985363200b7132b134\": container with ID starting with 62b5f1b9adb7a7d4bdd0d933c021bf203f10d9a2822e93985363200b7132b134 not found: ID does not exist"
Dec 10 12:59:49 crc kubenswrapper[4921]: I1210 12:59:49.934420 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-gwnkp"]
Dec 10 12:59:49 crc kubenswrapper[4921]: I1210 12:59:49.940072 4921 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-gwnkp"]
Dec 10 12:59:50 crc kubenswrapper[4921]: I1210 12:59:50.686348 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 10 12:59:51 crc kubenswrapper[4921]: I1210 12:59:51.225174 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9ab094b2-78dc-4ee6-b563-a1ae064588cf" path="/var/lib/kubelet/pods/9ab094b2-78dc-4ee6-b563-a1ae064588cf/volumes"
Dec 10 12:59:51 crc kubenswrapper[4921]: I1210 12:59:51.234603 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tjjqr"
Dec 10 12:59:51 crc kubenswrapper[4921]: I1210 12:59:51.337224 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-fz9kj"
Dec 10 12:59:51 crc kubenswrapper[4921]: I1210 12:59:51.356422 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/18c18bb4-4d43-4f8e-aa0d-808598954883-utilities\") pod \"18c18bb4-4d43-4f8e-aa0d-808598954883\" (UID: \"18c18bb4-4d43-4f8e-aa0d-808598954883\") "
Dec 10 12:59:51 crc kubenswrapper[4921]: I1210 12:59:51.356576 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/18c18bb4-4d43-4f8e-aa0d-808598954883-catalog-content\") pod \"18c18bb4-4d43-4f8e-aa0d-808598954883\" (UID: \"18c18bb4-4d43-4f8e-aa0d-808598954883\") "
Dec 10 12:59:51 crc kubenswrapper[4921]: I1210 12:59:51.356602 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mzslv\" (UniqueName: \"kubernetes.io/projected/18c18bb4-4d43-4f8e-aa0d-808598954883-kube-api-access-mzslv\") pod \"18c18bb4-4d43-4f8e-aa0d-808598954883\" (UID: \"18c18bb4-4d43-4f8e-aa0d-808598954883\") "
Dec 10 12:59:51 crc kubenswrapper[4921]: I1210 12:59:51.359477 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/18c18bb4-4d43-4f8e-aa0d-808598954883-utilities" (OuterVolumeSpecName: "utilities") pod "18c18bb4-4d43-4f8e-aa0d-808598954883" (UID: "18c18bb4-4d43-4f8e-aa0d-808598954883"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 10 12:59:51 crc kubenswrapper[4921]: I1210 12:59:51.362712 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/18c18bb4-4d43-4f8e-aa0d-808598954883-kube-api-access-mzslv" (OuterVolumeSpecName: "kube-api-access-mzslv") pod "18c18bb4-4d43-4f8e-aa0d-808598954883" (UID: "18c18bb4-4d43-4f8e-aa0d-808598954883"). InnerVolumeSpecName "kube-api-access-mzslv". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 10 12:59:51 crc kubenswrapper[4921]: I1210 12:59:51.386337 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/18c18bb4-4d43-4f8e-aa0d-808598954883-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "18c18bb4-4d43-4f8e-aa0d-808598954883" (UID: "18c18bb4-4d43-4f8e-aa0d-808598954883"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 10 12:59:51 crc kubenswrapper[4921]: I1210 12:59:51.457825 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2410954d-1bc9-4174-9639-5717425cff64-utilities\") pod \"2410954d-1bc9-4174-9639-5717425cff64\" (UID: \"2410954d-1bc9-4174-9639-5717425cff64\") "
Dec 10 12:59:51 crc kubenswrapper[4921]: I1210 12:59:51.457932 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2410954d-1bc9-4174-9639-5717425cff64-catalog-content\") pod \"2410954d-1bc9-4174-9639-5717425cff64\" (UID: \"2410954d-1bc9-4174-9639-5717425cff64\") "
Dec 10 12:59:51 crc kubenswrapper[4921]: I1210 12:59:51.457971 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g5qqj\" (UniqueName: \"kubernetes.io/projected/2410954d-1bc9-4174-9639-5717425cff64-kube-api-access-g5qqj\") pod \"2410954d-1bc9-4174-9639-5717425cff64\" (UID: \"2410954d-1bc9-4174-9639-5717425cff64\") "
Dec 10 12:59:51 crc kubenswrapper[4921]: I1210 12:59:51.458229 4921 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/18c18bb4-4d43-4f8e-aa0d-808598954883-utilities\") on node \"crc\" DevicePath \"\""
Dec 10 12:59:51 crc kubenswrapper[4921]: I1210 12:59:51.458241 4921 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/18c18bb4-4d43-4f8e-aa0d-808598954883-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 10 12:59:51 crc kubenswrapper[4921]: I1210 12:59:51.458251 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mzslv\" (UniqueName: \"kubernetes.io/projected/18c18bb4-4d43-4f8e-aa0d-808598954883-kube-api-access-mzslv\") on node \"crc\" DevicePath \"\""
Dec 10 12:59:51 crc kubenswrapper[4921]: I1210 12:59:51.458632 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2410954d-1bc9-4174-9639-5717425cff64-utilities" (OuterVolumeSpecName: "utilities") pod "2410954d-1bc9-4174-9639-5717425cff64" (UID: "2410954d-1bc9-4174-9639-5717425cff64"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 10 12:59:51 crc kubenswrapper[4921]: I1210 12:59:51.462073 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2410954d-1bc9-4174-9639-5717425cff64-kube-api-access-g5qqj" (OuterVolumeSpecName: "kube-api-access-g5qqj") pod "2410954d-1bc9-4174-9639-5717425cff64" (UID: "2410954d-1bc9-4174-9639-5717425cff64"). InnerVolumeSpecName "kube-api-access-g5qqj". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 10 12:59:51 crc kubenswrapper[4921]: I1210 12:59:51.560535 4921 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2410954d-1bc9-4174-9639-5717425cff64-utilities\") on node \"crc\" DevicePath \"\""
Dec 10 12:59:51 crc kubenswrapper[4921]: I1210 12:59:51.560566 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g5qqj\" (UniqueName: \"kubernetes.io/projected/2410954d-1bc9-4174-9639-5717425cff64-kube-api-access-g5qqj\") on node \"crc\" DevicePath \"\""
Dec 10 12:59:51 crc kubenswrapper[4921]: I1210 12:59:51.683976 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-prv2t"
Dec 10 12:59:51 crc kubenswrapper[4921]: I1210 12:59:51.761956 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m7kjl\" (UniqueName: \"kubernetes.io/projected/013be189-8bbe-40ef-af47-51c8fd79aa1a-kube-api-access-m7kjl\") pod \"013be189-8bbe-40ef-af47-51c8fd79aa1a\" (UID: \"013be189-8bbe-40ef-af47-51c8fd79aa1a\") "
Dec 10 12:59:51 crc kubenswrapper[4921]: I1210 12:59:51.762160 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/013be189-8bbe-40ef-af47-51c8fd79aa1a-catalog-content\") pod \"013be189-8bbe-40ef-af47-51c8fd79aa1a\" (UID: \"013be189-8bbe-40ef-af47-51c8fd79aa1a\") "
Dec 10 12:59:51 crc kubenswrapper[4921]: I1210 12:59:51.762195 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/013be189-8bbe-40ef-af47-51c8fd79aa1a-utilities\") pod \"013be189-8bbe-40ef-af47-51c8fd79aa1a\" (UID: \"013be189-8bbe-40ef-af47-51c8fd79aa1a\") "
Dec 10 12:59:51 crc kubenswrapper[4921]: I1210 12:59:51.763504 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/013be189-8bbe-40ef-af47-51c8fd79aa1a-utilities" (OuterVolumeSpecName: "utilities") pod "013be189-8bbe-40ef-af47-51c8fd79aa1a" (UID: "013be189-8bbe-40ef-af47-51c8fd79aa1a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 10 12:59:51 crc kubenswrapper[4921]: I1210 12:59:51.766281 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/013be189-8bbe-40ef-af47-51c8fd79aa1a-kube-api-access-m7kjl" (OuterVolumeSpecName: "kube-api-access-m7kjl") pod "013be189-8bbe-40ef-af47-51c8fd79aa1a" (UID: "013be189-8bbe-40ef-af47-51c8fd79aa1a"). InnerVolumeSpecName "kube-api-access-m7kjl". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 10 12:59:51 crc kubenswrapper[4921]: I1210 12:59:51.817305 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/013be189-8bbe-40ef-af47-51c8fd79aa1a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "013be189-8bbe-40ef-af47-51c8fd79aa1a" (UID: "013be189-8bbe-40ef-af47-51c8fd79aa1a"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 10 12:59:51 crc kubenswrapper[4921]: I1210 12:59:51.865183 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m7kjl\" (UniqueName: \"kubernetes.io/projected/013be189-8bbe-40ef-af47-51c8fd79aa1a-kube-api-access-m7kjl\") on node \"crc\" DevicePath \"\""
Dec 10 12:59:51 crc kubenswrapper[4921]: I1210 12:59:51.865224 4921 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/013be189-8bbe-40ef-af47-51c8fd79aa1a-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 10 12:59:51 crc kubenswrapper[4921]: I1210 12:59:51.865238 4921 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/013be189-8bbe-40ef-af47-51c8fd79aa1a-utilities\") on node \"crc\" DevicePath \"\""
Dec 10 12:59:51 crc kubenswrapper[4921]: I1210 12:59:51.930506 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-pwjmn" event={"ID":"7a4e394f-d2a7-4f32-8da5-a2a221b267ea","Type":"ContainerStarted","Data":"68c6e423172c14dd13abdbbd48204eb72a78325de7e9aead24ea7dceca931c0b"}
Dec 10 12:59:51 crc kubenswrapper[4921]: I1210 12:59:51.941033 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tjjqr" event={"ID":"18c18bb4-4d43-4f8e-aa0d-808598954883","Type":"ContainerDied","Data":"f85f34c4266a88c004655984d011866b890d2a67412337fc61eaffc00eb926dd"}
Dec 10 12:59:51 crc kubenswrapper[4921]: I1210 12:59:51.941059 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tjjqr"
Dec 10 12:59:51 crc kubenswrapper[4921]: I1210 12:59:51.941097 4921 scope.go:117] "RemoveContainer" containerID="f547e27ca22f766a0d275cb18bb543cd23f5d3afa9fd4182c04e077bdf684465"
Dec 10 12:59:51 crc kubenswrapper[4921]: I1210 12:59:51.958161 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fz9kj" event={"ID":"2410954d-1bc9-4174-9639-5717425cff64","Type":"ContainerDied","Data":"5e2544327283ce38d788a5e2df45cb7f016d7770dd10655ffd09615c5c487a6e"}
Dec 10 12:59:51 crc kubenswrapper[4921]: I1210 12:59:51.958151 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-fz9kj"
Dec 10 12:59:51 crc kubenswrapper[4921]: I1210 12:59:51.969590 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2410954d-1bc9-4174-9639-5717425cff64-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2410954d-1bc9-4174-9639-5717425cff64" (UID: "2410954d-1bc9-4174-9639-5717425cff64"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 10 12:59:51 crc kubenswrapper[4921]: I1210 12:59:51.970068 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-d7btl"
Dec 10 12:59:51 crc kubenswrapper[4921]: I1210 12:59:51.970175 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-prv2t" event={"ID":"013be189-8bbe-40ef-af47-51c8fd79aa1a","Type":"ContainerDied","Data":"191be80117b1c8b8f8a7fadcc533c1072b10871f0095f953c7c473d71d89cd98"}
Dec 10 12:59:51 crc kubenswrapper[4921]: I1210 12:59:51.970229 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-prv2t"
Dec 10 12:59:51 crc kubenswrapper[4921]: I1210 12:59:51.980083 4921 scope.go:117] "RemoveContainer" containerID="418db52cf692eb7a1132e138aa187cc82f7085b116022a2156af738620eca3a9"
Dec 10 12:59:51 crc kubenswrapper[4921]: I1210 12:59:51.988554 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-tjjqr"]
Dec 10 12:59:51 crc kubenswrapper[4921]: I1210 12:59:51.993656 4921 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-tjjqr"]
Dec 10 12:59:52 crc kubenswrapper[4921]: I1210 12:59:52.032369 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-prv2t"]
Dec 10 12:59:52 crc kubenswrapper[4921]: I1210 12:59:52.043096 4921 scope.go:117] "RemoveContainer" containerID="d5a801efcd5959c4103de70e313cb4c41f9f913806aa87c4319b7fd8e692ee4c"
Dec 10 12:59:52 crc kubenswrapper[4921]: I1210 12:59:52.046584 4921 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-prv2t"]
Dec 10 12:59:52 crc kubenswrapper[4921]: I1210 12:59:52.072555 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dc9190a3-c02a-48f7-ab9f-8be8951f3f37-catalog-content\") pod \"dc9190a3-c02a-48f7-ab9f-8be8951f3f37\" (UID: \"dc9190a3-c02a-48f7-ab9f-8be8951f3f37\") "
Dec 10 12:59:52 crc kubenswrapper[4921]: I1210 12:59:52.072652 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w2m2v\" (UniqueName: \"kubernetes.io/projected/dc9190a3-c02a-48f7-ab9f-8be8951f3f37-kube-api-access-w2m2v\") pod \"dc9190a3-c02a-48f7-ab9f-8be8951f3f37\" (UID: \"dc9190a3-c02a-48f7-ab9f-8be8951f3f37\") "
Dec 10 12:59:52 crc kubenswrapper[4921]: I1210 12:59:52.072709 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dc9190a3-c02a-48f7-ab9f-8be8951f3f37-utilities\") pod \"dc9190a3-c02a-48f7-ab9f-8be8951f3f37\" (UID: \"dc9190a3-c02a-48f7-ab9f-8be8951f3f37\") "
Dec 10 12:59:52 crc kubenswrapper[4921]: I1210 12:59:52.072999 4921 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2410954d-1bc9-4174-9639-5717425cff64-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 10 12:59:52 crc kubenswrapper[4921]: I1210 12:59:52.073849 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dc9190a3-c02a-48f7-ab9f-8be8951f3f37-utilities" (OuterVolumeSpecName: "utilities") pod "dc9190a3-c02a-48f7-ab9f-8be8951f3f37" (UID: "dc9190a3-c02a-48f7-ab9f-8be8951f3f37"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 10 12:59:52 crc kubenswrapper[4921]: I1210 12:59:52.076029 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dc9190a3-c02a-48f7-ab9f-8be8951f3f37-kube-api-access-w2m2v" (OuterVolumeSpecName: "kube-api-access-w2m2v") pod "dc9190a3-c02a-48f7-ab9f-8be8951f3f37" (UID: "dc9190a3-c02a-48f7-ab9f-8be8951f3f37"). InnerVolumeSpecName "kube-api-access-w2m2v". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 10 12:59:52 crc kubenswrapper[4921]: I1210 12:59:52.100588 4921 scope.go:117] "RemoveContainer" containerID="c869aad86158390bf33895a99023b4e9f6dcc38bbade5b8c3b82486d49595451"
Dec 10 12:59:52 crc kubenswrapper[4921]: I1210 12:59:52.126409 4921 scope.go:117] "RemoveContainer" containerID="76460874a6208d09ae0556c239aded8c58975c3c788d6ba54b3979bbe5c3e149"
Dec 10 12:59:52 crc kubenswrapper[4921]: I1210 12:59:52.129872 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-gq8pd"
Dec 10 12:59:52 crc kubenswrapper[4921]: I1210 12:59:52.166380 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dc9190a3-c02a-48f7-ab9f-8be8951f3f37-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "dc9190a3-c02a-48f7-ab9f-8be8951f3f37" (UID: "dc9190a3-c02a-48f7-ab9f-8be8951f3f37"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 10 12:59:52 crc kubenswrapper[4921]: I1210 12:59:52.174510 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3188aa17-7df1-4bc3-a929-7e5888cb32c8-utilities\") pod \"3188aa17-7df1-4bc3-a929-7e5888cb32c8\" (UID: \"3188aa17-7df1-4bc3-a929-7e5888cb32c8\") "
Dec 10 12:59:52 crc kubenswrapper[4921]: I1210 12:59:52.174629 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3188aa17-7df1-4bc3-a929-7e5888cb32c8-catalog-content\") pod \"3188aa17-7df1-4bc3-a929-7e5888cb32c8\" (UID: \"3188aa17-7df1-4bc3-a929-7e5888cb32c8\") "
Dec 10 12:59:52 crc kubenswrapper[4921]: I1210 12:59:52.174671 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p56tc\" (UniqueName: \"kubernetes.io/projected/3188aa17-7df1-4bc3-a929-7e5888cb32c8-kube-api-access-p56tc\") pod \"3188aa17-7df1-4bc3-a929-7e5888cb32c8\" (UID: \"3188aa17-7df1-4bc3-a929-7e5888cb32c8\") "
Dec 10 12:59:52 crc kubenswrapper[4921]: I1210 12:59:52.174905 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w2m2v\" (UniqueName: \"kubernetes.io/projected/dc9190a3-c02a-48f7-ab9f-8be8951f3f37-kube-api-access-w2m2v\") on node \"crc\" DevicePath \"\""
Dec 10 12:59:52 crc kubenswrapper[4921]: I1210 12:59:52.218487 4921 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dc9190a3-c02a-48f7-ab9f-8be8951f3f37-utilities\") on node \"crc\" DevicePath \"\""
Dec 10 12:59:52 crc kubenswrapper[4921]: I1210 12:59:52.218512 4921 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dc9190a3-c02a-48f7-ab9f-8be8951f3f37-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 10 12:59:52 crc kubenswrapper[4921]: I1210 12:59:52.190637 4921 scope.go:117] "RemoveContainer" containerID="df7a95fbedcf2525691a08c277048b8ae21104ecb97355c9cd4859f7837ca6eb"
Dec 10 12:59:52 crc kubenswrapper[4921]: I1210 12:59:52.179228 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3188aa17-7df1-4bc3-a929-7e5888cb32c8-kube-api-access-p56tc" (OuterVolumeSpecName: "kube-api-access-p56tc") pod "3188aa17-7df1-4bc3-a929-7e5888cb32c8" (UID: "3188aa17-7df1-4bc3-a929-7e5888cb32c8"). InnerVolumeSpecName "kube-api-access-p56tc". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 10 12:59:52 crc kubenswrapper[4921]: I1210 12:59:52.182028 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3188aa17-7df1-4bc3-a929-7e5888cb32c8-utilities" (OuterVolumeSpecName: "utilities") pod "3188aa17-7df1-4bc3-a929-7e5888cb32c8" (UID: "3188aa17-7df1-4bc3-a929-7e5888cb32c8"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 10 12:59:52 crc kubenswrapper[4921]: I1210 12:59:52.263873 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3188aa17-7df1-4bc3-a929-7e5888cb32c8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3188aa17-7df1-4bc3-a929-7e5888cb32c8" (UID: "3188aa17-7df1-4bc3-a929-7e5888cb32c8"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 10 12:59:52 crc kubenswrapper[4921]: I1210 12:59:52.283103 4921 scope.go:117] "RemoveContainer" containerID="eab8a24d40aa141b9fc0f0ac0165e18e9787bdc26511d79daf6d3c304302fbd9"
Dec 10 12:59:52 crc kubenswrapper[4921]: I1210 12:59:52.310798 4921 scope.go:117] "RemoveContainer" containerID="77de361e4774e867a67bbe3533abcbd1ee2c6dada0f6237695a6713f06328b14"
Dec 10 12:59:52 crc kubenswrapper[4921]: I1210 12:59:52.319914 4921 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3188aa17-7df1-4bc3-a929-7e5888cb32c8-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 10 12:59:52 crc kubenswrapper[4921]: I1210 12:59:52.319944 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p56tc\" (UniqueName: \"kubernetes.io/projected/3188aa17-7df1-4bc3-a929-7e5888cb32c8-kube-api-access-p56tc\") on node \"crc\" DevicePath \"\""
Dec 10 12:59:52 crc kubenswrapper[4921]: I1210 12:59:52.319955 4921 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3188aa17-7df1-4bc3-a929-7e5888cb32c8-utilities\") on node \"crc\" DevicePath \"\""
Dec 10 12:59:52 crc kubenswrapper[4921]: I1210 12:59:52.328602 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-4gf95"
Dec 10 12:59:52 crc kubenswrapper[4921]: I1210 12:59:52.331831 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-fz9kj"]
Dec 10 12:59:52 crc kubenswrapper[4921]: I1210 12:59:52.335194 4921 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-fz9kj"]
Dec 10 12:59:52 crc kubenswrapper[4921]: I1210 12:59:52.347926 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-wblqq"
Dec 10 12:59:52 crc kubenswrapper[4921]: I1210 12:59:52.350685 4921 scope.go:117] "RemoveContainer" containerID="6d9e4e06c9128374c4a14638b03454e0fb9bbf24c3629bf4f1c4f9f332867a64"
Dec 10 12:59:52 crc kubenswrapper[4921]: I1210 12:59:52.372898 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-54ksn"
Dec 10 12:59:52 crc kubenswrapper[4921]: I1210 12:59:52.420843 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rb9lm\" (UniqueName: \"kubernetes.io/projected/a9ab7ff6-04d8-45b5-93ba-12db1abe7091-kube-api-access-rb9lm\") pod \"a9ab7ff6-04d8-45b5-93ba-12db1abe7091\" (UID: \"a9ab7ff6-04d8-45b5-93ba-12db1abe7091\") "
Dec 10 12:59:52 crc kubenswrapper[4921]: I1210 12:59:52.420898 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2629ed61-02c7-450a-9d32-d3277755229f-catalog-content\") pod \"2629ed61-02c7-450a-9d32-d3277755229f\" (UID: \"2629ed61-02c7-450a-9d32-d3277755229f\") "
Dec 10 12:59:52 crc kubenswrapper[4921]: I1210 12:59:52.420990 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a9ab7ff6-04d8-45b5-93ba-12db1abe7091-utilities\") pod \"a9ab7ff6-04d8-45b5-93ba-12db1abe7091\" (UID: \"a9ab7ff6-04d8-45b5-93ba-12db1abe7091\") "
Dec 10 12:59:52 crc kubenswrapper[4921]: I1210 12:59:52.421010 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-knksg\" (UniqueName: \"kubernetes.io/projected/2629ed61-02c7-450a-9d32-d3277755229f-kube-api-access-knksg\") pod \"2629ed61-02c7-450a-9d32-d3277755229f\" (UID: \"2629ed61-02c7-450a-9d32-d3277755229f\") "
Dec 10 12:59:52 crc kubenswrapper[4921]: I1210 12:59:52.421034 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2629ed61-02c7-450a-9d32-d3277755229f-utilities\") pod \"2629ed61-02c7-450a-9d32-d3277755229f\" (UID: \"2629ed61-02c7-450a-9d32-d3277755229f\") "
Dec 10 12:59:52 crc kubenswrapper[4921]: I1210 12:59:52.421062 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a9ab7ff6-04d8-45b5-93ba-12db1abe7091-catalog-content\") pod \"a9ab7ff6-04d8-45b5-93ba-12db1abe7091\" (UID: \"a9ab7ff6-04d8-45b5-93ba-12db1abe7091\") "
Dec 10 12:59:52 crc kubenswrapper[4921]: I1210 12:59:52.421681 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a9ab7ff6-04d8-45b5-93ba-12db1abe7091-utilities" (OuterVolumeSpecName: "utilities") pod "a9ab7ff6-04d8-45b5-93ba-12db1abe7091" (UID: "a9ab7ff6-04d8-45b5-93ba-12db1abe7091"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 10 12:59:52 crc kubenswrapper[4921]: I1210 12:59:52.422716 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2629ed61-02c7-450a-9d32-d3277755229f-utilities" (OuterVolumeSpecName: "utilities") pod "2629ed61-02c7-450a-9d32-d3277755229f" (UID: "2629ed61-02c7-450a-9d32-d3277755229f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 10 12:59:52 crc kubenswrapper[4921]: I1210 12:59:52.424280 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a9ab7ff6-04d8-45b5-93ba-12db1abe7091-kube-api-access-rb9lm" (OuterVolumeSpecName: "kube-api-access-rb9lm") pod "a9ab7ff6-04d8-45b5-93ba-12db1abe7091" (UID: "a9ab7ff6-04d8-45b5-93ba-12db1abe7091"). InnerVolumeSpecName "kube-api-access-rb9lm". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 10 12:59:52 crc kubenswrapper[4921]: I1210 12:59:52.424838 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2629ed61-02c7-450a-9d32-d3277755229f-kube-api-access-knksg" (OuterVolumeSpecName: "kube-api-access-knksg") pod "2629ed61-02c7-450a-9d32-d3277755229f" (UID: "2629ed61-02c7-450a-9d32-d3277755229f"). InnerVolumeSpecName "kube-api-access-knksg". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 10 12:59:52 crc kubenswrapper[4921]: I1210 12:59:52.480875 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2629ed61-02c7-450a-9d32-d3277755229f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2629ed61-02c7-450a-9d32-d3277755229f" (UID: "2629ed61-02c7-450a-9d32-d3277755229f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 10 12:59:52 crc kubenswrapper[4921]: I1210 12:59:52.522769 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/13ae054d-506c-4469-baaa-c8cf6347b2f1-catalog-content\") pod \"13ae054d-506c-4469-baaa-c8cf6347b2f1\" (UID: \"13ae054d-506c-4469-baaa-c8cf6347b2f1\") "
Dec 10 12:59:52 crc kubenswrapper[4921]: I1210 12:59:52.523096 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/13ae054d-506c-4469-baaa-c8cf6347b2f1-utilities\") pod \"13ae054d-506c-4469-baaa-c8cf6347b2f1\" (UID: \"13ae054d-506c-4469-baaa-c8cf6347b2f1\") "
Dec 10 12:59:52 crc kubenswrapper[4921]: I1210 12:59:52.524412 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/13ae054d-506c-4469-baaa-c8cf6347b2f1-utilities" (OuterVolumeSpecName: "utilities") pod "13ae054d-506c-4469-baaa-c8cf6347b2f1" (UID: "13ae054d-506c-4469-baaa-c8cf6347b2f1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 10 12:59:52 crc kubenswrapper[4921]: I1210 12:59:52.524551 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4r7pt\" (UniqueName: \"kubernetes.io/projected/13ae054d-506c-4469-baaa-c8cf6347b2f1-kube-api-access-4r7pt\") pod \"13ae054d-506c-4469-baaa-c8cf6347b2f1\" (UID: \"13ae054d-506c-4469-baaa-c8cf6347b2f1\") "
Dec 10 12:59:52 crc kubenswrapper[4921]: I1210 12:59:52.525649 4921 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a9ab7ff6-04d8-45b5-93ba-12db1abe7091-utilities\") on node \"crc\" DevicePath \"\""
Dec 10 12:59:52 crc kubenswrapper[4921]: I1210 12:59:52.525687 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-knksg\" (UniqueName: \"kubernetes.io/projected/2629ed61-02c7-450a-9d32-d3277755229f-kube-api-access-knksg\") on node \"crc\" DevicePath \"\""
Dec 10 12:59:52 crc kubenswrapper[4921]: I1210 12:59:52.525703 4921 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2629ed61-02c7-450a-9d32-d3277755229f-utilities\") on node \"crc\" DevicePath \"\""
Dec 10 12:59:52 crc kubenswrapper[4921]: I1210 12:59:52.525717 4921 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/13ae054d-506c-4469-baaa-c8cf6347b2f1-utilities\") on node \"crc\" DevicePath \"\""
Dec 10 12:59:52 crc kubenswrapper[4921]: I1210 12:59:52.525730 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rb9lm\" (UniqueName: \"kubernetes.io/projected/a9ab7ff6-04d8-45b5-93ba-12db1abe7091-kube-api-access-rb9lm\") on node \"crc\" DevicePath \"\""
Dec 10 12:59:52 crc kubenswrapper[4921]: I1210 12:59:52.525771 4921 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2629ed61-02c7-450a-9d32-d3277755229f-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 10 12:59:52 crc kubenswrapper[4921]: I1210 12:59:52.528804 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/13ae054d-506c-4469-baaa-c8cf6347b2f1-kube-api-access-4r7pt" (OuterVolumeSpecName: "kube-api-access-4r7pt") pod "13ae054d-506c-4469-baaa-c8cf6347b2f1" (UID: "13ae054d-506c-4469-baaa-c8cf6347b2f1"). InnerVolumeSpecName "kube-api-access-4r7pt". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 10 12:59:52 crc kubenswrapper[4921]: I1210 12:59:52.543166 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a9ab7ff6-04d8-45b5-93ba-12db1abe7091-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a9ab7ff6-04d8-45b5-93ba-12db1abe7091" (UID: "a9ab7ff6-04d8-45b5-93ba-12db1abe7091"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 10 12:59:52 crc kubenswrapper[4921]: I1210 12:59:52.553405 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/13ae054d-506c-4469-baaa-c8cf6347b2f1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "13ae054d-506c-4469-baaa-c8cf6347b2f1" (UID: "13ae054d-506c-4469-baaa-c8cf6347b2f1"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 10 12:59:52 crc kubenswrapper[4921]: I1210 12:59:52.626948 4921 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/13ae054d-506c-4469-baaa-c8cf6347b2f1-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 10 12:59:52 crc kubenswrapper[4921]: I1210 12:59:52.626988 4921 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a9ab7ff6-04d8-45b5-93ba-12db1abe7091-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 10 12:59:52 crc kubenswrapper[4921]: I1210 12:59:52.627000 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4r7pt\" (UniqueName: \"kubernetes.io/projected/13ae054d-506c-4469-baaa-c8cf6347b2f1-kube-api-access-4r7pt\") on node \"crc\" DevicePath \"\""
Dec 10 12:59:52 crc kubenswrapper[4921]: I1210 12:59:52.977676 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-4gf95"
Dec 10 12:59:52 crc kubenswrapper[4921]: I1210 12:59:52.977688 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4gf95" event={"ID":"a9ab7ff6-04d8-45b5-93ba-12db1abe7091","Type":"ContainerDied","Data":"af69f731d477ad0a290d0bd6300d39e398ff4b5bf1858def9faae0a669384342"}
Dec 10 12:59:52 crc kubenswrapper[4921]: I1210 12:59:52.978220 4921 scope.go:117] "RemoveContainer" containerID="4b610590e3691ced7b3936648b7da7f594462b00d24f96bcdd4734fafec99c11"
Dec 10 12:59:52 crc kubenswrapper[4921]: I1210 12:59:52.981427 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-d7btl" event={"ID":"dc9190a3-c02a-48f7-ab9f-8be8951f3f37","Type":"ContainerDied","Data":"07d8bc36a6840a7abe7f2c9c0644bd4b4d776f5e6d6759f40b2b88e8592d31c8"}
Dec 10 12:59:52 crc kubenswrapper[4921]: I1210 12:59:52.981476 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-d7btl"
Dec 10 12:59:52 crc kubenswrapper[4921]: I1210 12:59:52.984069 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-54ksn"
Dec 10 12:59:52 crc kubenswrapper[4921]: I1210 12:59:52.984107 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-54ksn" event={"ID":"13ae054d-506c-4469-baaa-c8cf6347b2f1","Type":"ContainerDied","Data":"9801705926e9a0e5fc3930a481a46ed8019781b8ba14ceb1a09d198f3c3de5d7"}
Dec 10 12:59:52 crc kubenswrapper[4921]: I1210 12:59:52.987869 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gq8pd" event={"ID":"3188aa17-7df1-4bc3-a929-7e5888cb32c8","Type":"ContainerDied","Data":"0a83db4bf61de026cf30aa9d7a6c42ea3b071c35ba26c1db7777ee15f6f563ae"}
Dec 10 12:59:52 crc kubenswrapper[4921]: I1210 12:59:52.987978 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-gq8pd"
Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.001287 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wblqq" event={"ID":"2629ed61-02c7-450a-9d32-d3277755229f","Type":"ContainerDied","Data":"5bad5d29a6c558be94520dcdbc181b05c2690972d832678d1c8456ceb3a2f205"}
Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.001354 4921 scope.go:117] "RemoveContainer" containerID="a494a725f061f95bff6eeb83acb4a50ee3d83fb8a901b7b80cb657662a58a5fa"
Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.001500 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-wblqq"
Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.008865 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-pwjmn"
Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.021155 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-pwjmn"
Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.026929 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-pwjmn" podStartSLOduration=6.026912376 podStartE2EDuration="6.026912376s" podCreationTimestamp="2025-12-10 12:59:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 12:59:53.026466723 +0000 UTC m=+190.242688667" watchObservedRunningTime="2025-12-10 12:59:53.026912376 +0000 UTC m=+190.243134300"
Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.034549 4921 scope.go:117] "RemoveContainer" containerID="27285eae6a970dfbb00261f4bc36b0a131cb4bd179aa769b01dd1c8ec2ac5c23"
Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.050361 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-gq8pd"]
Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.060485 4921 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-gq8pd"]
Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.060743 4921 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"]
Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.061005 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" containerID="cri-o://0534394a39803e8a7555e29d0770b5ac7f9197a5f0e03bec4c5460d77fffdd14" gracePeriod=15
Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.061155 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" containerID="cri-o://f534d6390920d177e185001b28f7ece42d82a0da922b4aaf174c271dbe975c50" gracePeriod=15
Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.061205 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" containerID="cri-o://1e5afbcb1ea81c3f9ec4152ef614a3f07ba1ded75c774c467e968f9c3ee72e33" gracePeriod=15
Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.061253 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" containerID="cri-o://6eaca0cb438e61f0856ed7dc64256ccd02aee8dac014d1f5e9cd8aa180c736fb" gracePeriod=15
Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.061294 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" containerID="cri-o://692a4c4828dc74b1bfb948f58fab96ee6674030cb9009c72f30f9eae482eb682" gracePeriod=15
Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.061470 4921 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"]
Dec 10 12:59:53 crc kubenswrapper[4921]: E1210 12:59:53.061668 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2629ed61-02c7-450a-9d32-d3277755229f" containerName="extract-utilities"
Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.061679 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="2629ed61-02c7-450a-9d32-d3277755229f" containerName="extract-utilities"
Dec 10 12:59:53 crc kubenswrapper[4921]: E1210 12:59:53.061688 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2410954d-1bc9-4174-9639-5717425cff64" containerName="extract-utilities"
Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.061696 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="2410954d-1bc9-4174-9639-5717425cff64" containerName="extract-utilities"
Dec 10 12:59:53 crc kubenswrapper[4921]: E1210 12:59:53.061706 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc9190a3-c02a-48f7-ab9f-8be8951f3f37" containerName="extract-content"
Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.061713 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc9190a3-c02a-48f7-ab9f-8be8951f3f37" containerName="extract-content"
Dec 10 12:59:53 crc kubenswrapper[4921]: E1210 12:59:53.061723 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc9190a3-c02a-48f7-ab9f-8be8951f3f37" containerName="registry-server"
Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.061730 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc9190a3-c02a-48f7-ab9f-8be8951f3f37" containerName="registry-server"
Dec 10 12:59:53 crc kubenswrapper[4921]: E1210 12:59:53.061738 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup"
Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.061744 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup"
Dec 10 12:59:53 crc kubenswrapper[4921]: E1210 12:59:53.061751 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer"
Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.061757 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer"
Dec 10 12:59:53 crc kubenswrapper[4921]: E1210 12:59:53.061764 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints"
Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.061769 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints"
Dec 10 12:59:53 crc kubenswrapper[4921]: E1210 12:59:53.061776 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9ab094b2-78dc-4ee6-b563-a1ae064588cf" containerName="marketplace-operator"
Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.061782 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="9ab094b2-78dc-4ee6-b563-a1ae064588cf" containerName="marketplace-operator"
Dec 10 12:59:53 crc kubenswrapper[4921]: E1210 12:59:53.061788 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="13ae054d-506c-4469-baaa-c8cf6347b2f1" containerName="extract-content"
Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.061794 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="13ae054d-506c-4469-baaa-c8cf6347b2f1" containerName="extract-content"
Dec 10 12:59:53 crc kubenswrapper[4921]: E1210 12:59:53.061801 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz"
Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.061806 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz"
Dec 10 12:59:53 crc kubenswrapper[4921]: E1210 12:59:53.061814 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a9ab7ff6-04d8-45b5-93ba-12db1abe7091" containerName="registry-server"
Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.061819 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="a9ab7ff6-04d8-45b5-93ba-12db1abe7091" containerName="registry-server"
Dec 10 12:59:53 crc kubenswrapper[4921]: E1210 12:59:53.061826 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller"
Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.061832 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller"
Dec 10 12:59:53 crc kubenswrapper[4921]: E1210 12:59:53.061841 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver"
Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.061847 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver"
Dec 10 12:59:53 crc kubenswrapper[4921]: E1210 12:59:53.061858 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2410954d-1bc9-4174-9639-5717425cff64" containerName="registry-server"
Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.061864 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="2410954d-1bc9-4174-9639-5717425cff64" containerName="registry-server"
Dec 10 12:59:53 crc kubenswrapper[4921]: E1210 12:59:53.061872 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2410954d-1bc9-4174-9639-5717425cff64" containerName="extract-content"
Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.061879 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="2410954d-1bc9-4174-9639-5717425cff64" containerName="extract-content"
Dec 10 12:59:53 crc kubenswrapper[4921]: E1210 12:59:53.061886 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="13ae054d-506c-4469-baaa-c8cf6347b2f1" containerName="extract-utilities"
Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.061892 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="13ae054d-506c-4469-baaa-c8cf6347b2f1" containerName="extract-utilities"
Dec 10 12:59:53 crc kubenswrapper[4921]: E1210 12:59:53.061898 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2629ed61-02c7-450a-9d32-d3277755229f" containerName="registry-server"
Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.061903 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="2629ed61-02c7-450a-9d32-d3277755229f" containerName="registry-server"
Dec 10 12:59:53 crc kubenswrapper[4921]: E1210 12:59:53.061909 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2629ed61-02c7-450a-9d32-d3277755229f" containerName="extract-content"
Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.061914 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="2629ed61-02c7-450a-9d32-d3277755229f" containerName="extract-content"
Dec 10 12:59:53 crc kubenswrapper[4921]: E1210 12:59:53.061921 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="18c18bb4-4d43-4f8e-aa0d-808598954883" containerName="registry-server"
Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.061926 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="18c18bb4-4d43-4f8e-aa0d-808598954883" containerName="registry-server"
Dec 10 12:59:53 crc kubenswrapper[4921]: E1210 12:59:53.061933 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="13ae054d-506c-4469-baaa-c8cf6347b2f1" containerName="registry-server"
Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.061940 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="13ae054d-506c-4469-baaa-c8cf6347b2f1" containerName="registry-server"
Dec 10 12:59:53 crc kubenswrapper[4921]: E1210 12:59:53.061947 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="013be189-8bbe-40ef-af47-51c8fd79aa1a" containerName="extract-utilities"
Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.061952 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="013be189-8bbe-40ef-af47-51c8fd79aa1a" containerName="extract-utilities"
Dec 10 12:59:53 crc kubenswrapper[4921]: E1210 12:59:53.061960 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="18c18bb4-4d43-4f8e-aa0d-808598954883" containerName="extract-utilities"
Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.061965 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="18c18bb4-4d43-4f8e-aa0d-808598954883" containerName="extract-utilities"
Dec 10 12:59:53 crc kubenswrapper[4921]: E1210 12:59:53.061974 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="18c18bb4-4d43-4f8e-aa0d-808598954883" containerName="extract-content"
Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.061980 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="18c18bb4-4d43-4f8e-aa0d-808598954883" containerName="extract-content"
Dec 10 12:59:53 crc kubenswrapper[4921]: E1210 12:59:53.061987 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3188aa17-7df1-4bc3-a929-7e5888cb32c8" containerName="registry-server"
Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.061993 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="3188aa17-7df1-4bc3-a929-7e5888cb32c8" containerName="registry-server"
Dec 10 12:59:53 crc kubenswrapper[4921]: E1210 12:59:53.062002 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="013be189-8bbe-40ef-af47-51c8fd79aa1a" containerName="extract-content"
Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.062008 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="013be189-8bbe-40ef-af47-51c8fd79aa1a" containerName="extract-content"
Dec 10 12:59:53 crc kubenswrapper[4921]: E1210 12:59:53.062015 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a9ab7ff6-04d8-45b5-93ba-12db1abe7091" containerName="extract-utilities"
Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.062020 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="a9ab7ff6-04d8-45b5-93ba-12db1abe7091" containerName="extract-utilities"
Dec 10 12:59:53 crc kubenswrapper[4921]: E1210 12:59:53.062027 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc9190a3-c02a-48f7-ab9f-8be8951f3f37" containerName="extract-utilities"
Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.062033 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc9190a3-c02a-48f7-ab9f-8be8951f3f37" containerName="extract-utilities"
Dec 10 12:59:53 crc kubenswrapper[4921]: E1210 12:59:53.062039 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3188aa17-7df1-4bc3-a929-7e5888cb32c8" containerName="extract-content"
Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.062045 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="3188aa17-7df1-4bc3-a929-7e5888cb32c8" containerName="extract-content"
Dec 10 12:59:53 crc kubenswrapper[4921]: E1210 12:59:53.062053 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="013be189-8bbe-40ef-af47-51c8fd79aa1a" containerName="registry-server"
Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.062059 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="013be189-8bbe-40ef-af47-51c8fd79aa1a" containerName="registry-server"
Dec 10 12:59:53 crc kubenswrapper[4921]: E1210 12:59:53.062067 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3188aa17-7df1-4bc3-a929-7e5888cb32c8" containerName="extract-utilities"
Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.062073 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="3188aa17-7df1-4bc3-a929-7e5888cb32c8" containerName="extract-utilities"
Dec 10 12:59:53 crc kubenswrapper[4921]: E1210 12:59:53.062081 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a9ab7ff6-04d8-45b5-93ba-12db1abe7091" containerName="extract-content"
Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.062088 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="a9ab7ff6-04d8-45b5-93ba-12db1abe7091" containerName="extract-content"
Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.062176 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="013be189-8bbe-40ef-af47-51c8fd79aa1a" containerName="registry-server"
Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.062189 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="18c18bb4-4d43-4f8e-aa0d-808598954883" containerName="registry-server"
Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.062196 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="a9ab7ff6-04d8-45b5-93ba-12db1abe7091" containerName="registry-server"
Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.062204 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints"
Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.062211 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="dc9190a3-c02a-48f7-ab9f-8be8951f3f37" containerName="registry-server"
Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.062217 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer"
Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.062225 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller"
Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.062230 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="9ab094b2-78dc-4ee6-b563-a1ae064588cf" containerName="marketplace-operator"
Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.062239 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="2410954d-1bc9-4174-9639-5717425cff64" containerName="registry-server"
Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.062247 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="13ae054d-506c-4469-baaa-c8cf6347b2f1" containerName="registry-server"
Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.062256 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver"
Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.062263 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz"
Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.062273 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="2629ed61-02c7-450a-9d32-d3277755229f" containerName="registry-server"
Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.062279 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="3188aa17-7df1-4bc3-a929-7e5888cb32c8" containerName="registry-server"
Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.062286 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints"
Dec 10 12:59:53 crc kubenswrapper[4921]: E1210 12:59:53.062380 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints"
Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.062409 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints"
Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.070542 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-4gf95"]
Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.087517 4921 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-4gf95"]
Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.087742 4921 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"]
Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.088605 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.088924 4921 scope.go:117] "RemoveContainer" containerID="4085a2f870ac0bad809d33b1e1b2d793834cc937698175197320af9fa6ac1723" Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.116054 4921 scope.go:117] "RemoveContainer" containerID="4f768ce22e121236e077def4b3bbccdbb217dac33d83261ffb5fd69e6a8d9df6" Dec 10 12:59:53 crc kubenswrapper[4921]: E1210 12:59:53.123348 4921 kubelet.go:1929] "Failed creating a mirror pod for" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 38.102.83.182:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.147142 4921 scope.go:117] "RemoveContainer" containerID="171291850c17ea9c82f5496895df42e63387659dece0b904c3b863b20f5e446d" Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.190626 4921 scope.go:117] "RemoveContainer" containerID="c386d575c15f23b50c8ed3208d26d77e577fc3bf8f7820934028baebd326be7b" Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.217531 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="013be189-8bbe-40ef-af47-51c8fd79aa1a" path="/var/lib/kubelet/pods/013be189-8bbe-40ef-af47-51c8fd79aa1a/volumes" Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.218366 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="18c18bb4-4d43-4f8e-aa0d-808598954883" path="/var/lib/kubelet/pods/18c18bb4-4d43-4f8e-aa0d-808598954883/volumes" Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.218986 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2410954d-1bc9-4174-9639-5717425cff64" path="/var/lib/kubelet/pods/2410954d-1bc9-4174-9639-5717425cff64/volumes" Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.219964 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3188aa17-7df1-4bc3-a929-7e5888cb32c8" path="/var/lib/kubelet/pods/3188aa17-7df1-4bc3-a929-7e5888cb32c8/volumes" Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.220607 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a9ab7ff6-04d8-45b5-93ba-12db1abe7091" path="/var/lib/kubelet/pods/a9ab7ff6-04d8-45b5-93ba-12db1abe7091/volumes" Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.239476 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.239513 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.239540 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " 
pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.239569 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.239590 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.239608 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.239642 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.239663 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.241890 4921 scope.go:117] "RemoveContainer" containerID="1233ddcd8bcd9fe1f9cbfb3f838f219cdeac1a2b7ee6c97958983fe429bccdb6" Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.257946 4921 scope.go:117] "RemoveContainer" containerID="3acc9830db4fd617b204c4002f866571a43197b2efc219c62f5a6d575fc86432" Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.271865 4921 scope.go:117] "RemoveContainer" containerID="70756b488fd188cd8ad61aea740158053814d07f7727e5f9b0db5b3d16feacd1" Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.289462 4921 scope.go:117] "RemoveContainer" containerID="d29dc45badb24b08a288c64a158855cce5c927405f82bd1e8d118a63b311f2ee" Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.303627 4921 scope.go:117] "RemoveContainer" containerID="c66094c5dc4a9867db94e7bb5d27245526f31ea2327a440d15adbf35ef0c990f" Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.317694 4921 scope.go:117] "RemoveContainer" containerID="f922c4b2d1473c624529d8a43f53aca3846f71648e0e5a3ac05779b68a813182" Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.330041 4921 scope.go:117] "RemoveContainer" containerID="a61cdcfafa10428011a11a0aa21471f31a7f0a8f8f7fb2defd896ec34886d546" Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.340650 4921 scope.go:117] "RemoveContainer" containerID="c71e73fc77ba41a2d5837a572875003714c95530b5a5cd96c68dfefbe632ed8f" Dec 10 
12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.341977 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.342034 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.342064 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.342142 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.342169 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.342192 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.342210 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.342239 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.342586 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.343036 4921 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.343162 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.343169 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.343211 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.343275 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.343337 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.343406 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 10 12:59:53 crc kubenswrapper[4921]: I1210 12:59:53.423871 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 10 12:59:53 crc kubenswrapper[4921]: W1210 12:59:53.445074 4921 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf85e55b1a89d02b0cb034b1ea31ed45a.slice/crio-0032dc83eb1e4141dfde7091532372e3cb80f43c135cba58b736a0353d196f2a WatchSource:0}: Error finding container 0032dc83eb1e4141dfde7091532372e3cb80f43c135cba58b736a0353d196f2a: Status 404 returned error can't find the container with id 0032dc83eb1e4141dfde7091532372e3cb80f43c135cba58b736a0353d196f2a Dec 10 12:59:53 crc kubenswrapper[4921]: E1210 12:59:53.447934 4921 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.182:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.187fdc1c6400381b openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-10 12:59:53.447454747 +0000 UTC m=+190.663676671,LastTimestamp:2025-12-10 12:59:53.447454747 +0000 UTC m=+190.663676671,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 10 12:59:54 crc kubenswrapper[4921]: I1210 12:59:54.019498 4921 generic.go:334] "Generic (PLEG): container finished" podID="f8e5f473-25ff-49f8-8c92-3201eaefae48" containerID="fa90c0921d55c4769bd31234e74cf7c2bd91fc85e6045f0b881bdb66e0f671bb" exitCode=0 Dec 10 12:59:54 crc kubenswrapper[4921]: I1210 12:59:54.019618 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"f8e5f473-25ff-49f8-8c92-3201eaefae48","Type":"ContainerDied","Data":"fa90c0921d55c4769bd31234e74cf7c2bd91fc85e6045f0b881bdb66e0f671bb"} Dec 10 12:59:54 crc kubenswrapper[4921]: I1210 12:59:54.021457 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"6312a19d1213782f70503239c07f7233e99ce53bf35a25d8f677e851571520de"} Dec 10 12:59:54 crc kubenswrapper[4921]: I1210 12:59:54.021497 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"0032dc83eb1e4141dfde7091532372e3cb80f43c135cba58b736a0353d196f2a"} Dec 10 12:59:54 crc kubenswrapper[4921]: E1210 12:59:54.021977 4921 kubelet.go:1929] "Failed creating a mirror pod for" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 38.102.83.182:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 10 12:59:54 crc kubenswrapper[4921]: I1210 12:59:54.025683 4921 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 10 12:59:54 crc kubenswrapper[4921]: I1210 12:59:54.027043 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 10 12:59:54 crc kubenswrapper[4921]: I1210 12:59:54.027756 4921 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="f534d6390920d177e185001b28f7ece42d82a0da922b4aaf174c271dbe975c50" exitCode=0 Dec 10 12:59:54 crc kubenswrapper[4921]: I1210 12:59:54.027905 4921 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="1e5afbcb1ea81c3f9ec4152ef614a3f07ba1ded75c774c467e968f9c3ee72e33" exitCode=0 Dec 10 12:59:54 crc kubenswrapper[4921]: I1210 12:59:54.028017 4921 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="6eaca0cb438e61f0856ed7dc64256ccd02aee8dac014d1f5e9cd8aa180c736fb" exitCode=0 Dec 10 12:59:54 crc kubenswrapper[4921]: I1210 12:59:54.028101 4921 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="692a4c4828dc74b1bfb948f58fab96ee6674030cb9009c72f30f9eae482eb682" exitCode=2 Dec 10 12:59:54 crc kubenswrapper[4921]: I1210 12:59:54.027809 4921 scope.go:117] "RemoveContainer" containerID="3b39874b20cdccc7903753342421a1f7e13b7e99a2cb699a7c0e44226aebd4f4" Dec 10 12:59:54 crc kubenswrapper[4921]: E1210 12:59:54.855013 4921 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.182:6443: connect: connection refused" Dec 10 12:59:54 crc kubenswrapper[4921]: E1210 12:59:54.855478 4921 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.182:6443: connect: connection refused" Dec 10 12:59:54 crc kubenswrapper[4921]: E1210 12:59:54.855924 4921 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.182:6443: connect: connection refused" Dec 10 12:59:54 crc kubenswrapper[4921]: E1210 12:59:54.856453 4921 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.182:6443: connect: connection refused" Dec 10 12:59:54 crc kubenswrapper[4921]: E1210 12:59:54.856847 4921 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.182:6443: connect: connection refused" Dec 10 12:59:54 crc kubenswrapper[4921]: I1210 12:59:54.856905 4921 controller.go:115] "failed to update lease using latest lease, fallback to ensure lease" err="failed 5 attempts to update lease" Dec 10 12:59:54 crc kubenswrapper[4921]: E1210 12:59:54.857309 4921 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.182:6443: connect: connection refused" 
interval="200ms" Dec 10 12:59:55 crc kubenswrapper[4921]: I1210 12:59:55.039762 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 10 12:59:55 crc kubenswrapper[4921]: E1210 12:59:55.059430 4921 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.182:6443: connect: connection refused" interval="400ms" Dec 10 12:59:55 crc kubenswrapper[4921]: I1210 12:59:55.431912 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 10 12:59:55 crc kubenswrapper[4921]: E1210 12:59:55.460971 4921 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.182:6443: connect: connection refused" interval="800ms" Dec 10 12:59:55 crc kubenswrapper[4921]: I1210 12:59:55.577244 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f8e5f473-25ff-49f8-8c92-3201eaefae48-kubelet-dir\") pod \"f8e5f473-25ff-49f8-8c92-3201eaefae48\" (UID: \"f8e5f473-25ff-49f8-8c92-3201eaefae48\") " Dec 10 12:59:55 crc kubenswrapper[4921]: I1210 12:59:55.577328 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f8e5f473-25ff-49f8-8c92-3201eaefae48-kube-api-access\") pod \"f8e5f473-25ff-49f8-8c92-3201eaefae48\" (UID: \"f8e5f473-25ff-49f8-8c92-3201eaefae48\") " Dec 10 12:59:55 crc kubenswrapper[4921]: I1210 12:59:55.577377 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f8e5f473-25ff-49f8-8c92-3201eaefae48-var-lock\") pod \"f8e5f473-25ff-49f8-8c92-3201eaefae48\" (UID: \"f8e5f473-25ff-49f8-8c92-3201eaefae48\") " Dec 10 12:59:55 crc kubenswrapper[4921]: I1210 12:59:55.577383 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f8e5f473-25ff-49f8-8c92-3201eaefae48-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "f8e5f473-25ff-49f8-8c92-3201eaefae48" (UID: "f8e5f473-25ff-49f8-8c92-3201eaefae48"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 12:59:55 crc kubenswrapper[4921]: I1210 12:59:55.577549 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f8e5f473-25ff-49f8-8c92-3201eaefae48-var-lock" (OuterVolumeSpecName: "var-lock") pod "f8e5f473-25ff-49f8-8c92-3201eaefae48" (UID: "f8e5f473-25ff-49f8-8c92-3201eaefae48"). InnerVolumeSpecName "var-lock". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 12:59:55 crc kubenswrapper[4921]: I1210 12:59:55.577707 4921 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f8e5f473-25ff-49f8-8c92-3201eaefae48-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 10 12:59:55 crc kubenswrapper[4921]: I1210 12:59:55.577734 4921 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f8e5f473-25ff-49f8-8c92-3201eaefae48-var-lock\") on node \"crc\" DevicePath \"\"" Dec 10 12:59:55 crc kubenswrapper[4921]: I1210 12:59:55.581823 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f8e5f473-25ff-49f8-8c92-3201eaefae48-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "f8e5f473-25ff-49f8-8c92-3201eaefae48" (UID: "f8e5f473-25ff-49f8-8c92-3201eaefae48"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 12:59:55 crc kubenswrapper[4921]: I1210 12:59:55.679501 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f8e5f473-25ff-49f8-8c92-3201eaefae48-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 10 12:59:55 crc kubenswrapper[4921]: I1210 12:59:55.911205 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 10 12:59:55 crc kubenswrapper[4921]: I1210 12:59:55.911928 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 10 12:59:56 crc kubenswrapper[4921]: I1210 12:59:56.048440 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 10 12:59:56 crc kubenswrapper[4921]: I1210 12:59:56.050085 4921 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="0534394a39803e8a7555e29d0770b5ac7f9197a5f0e03bec4c5460d77fffdd14" exitCode=0 Dec 10 12:59:56 crc kubenswrapper[4921]: I1210 12:59:56.050150 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 10 12:59:56 crc kubenswrapper[4921]: I1210 12:59:56.050213 4921 scope.go:117] "RemoveContainer" containerID="f534d6390920d177e185001b28f7ece42d82a0da922b4aaf174c271dbe975c50" Dec 10 12:59:56 crc kubenswrapper[4921]: I1210 12:59:56.052912 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"f8e5f473-25ff-49f8-8c92-3201eaefae48","Type":"ContainerDied","Data":"c78dd785a2e7a210720cd383fa9eb4536badbea4e7b73a4296757e5bae87e9fc"} Dec 10 12:59:56 crc kubenswrapper[4921]: I1210 12:59:56.052970 4921 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c78dd785a2e7a210720cd383fa9eb4536badbea4e7b73a4296757e5bae87e9fc" Dec 10 12:59:56 crc kubenswrapper[4921]: I1210 12:59:56.053006 4921 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 10 12:59:56 crc kubenswrapper[4921]: I1210 12:59:56.070332 4921 scope.go:117] "RemoveContainer" containerID="1e5afbcb1ea81c3f9ec4152ef614a3f07ba1ded75c774c467e968f9c3ee72e33" Dec 10 12:59:56 crc kubenswrapper[4921]: I1210 12:59:56.097126 4921 scope.go:117] "RemoveContainer" containerID="6eaca0cb438e61f0856ed7dc64256ccd02aee8dac014d1f5e9cd8aa180c736fb" Dec 10 12:59:56 crc kubenswrapper[4921]: I1210 12:59:56.097762 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 10 12:59:56 crc kubenswrapper[4921]: I1210 12:59:56.097876 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 10 12:59:56 crc kubenswrapper[4921]: I1210 12:59:56.097911 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 10 12:59:56 crc kubenswrapper[4921]: I1210 12:59:56.098303 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir" (OuterVolumeSpecName: "cert-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "cert-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 12:59:56 crc kubenswrapper[4921]: I1210 12:59:56.098333 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 12:59:56 crc kubenswrapper[4921]: I1210 12:59:56.099191 4921 reconciler_common.go:293] "Volume detached for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") on node \"crc\" DevicePath \"\"" Dec 10 12:59:56 crc kubenswrapper[4921]: I1210 12:59:56.099209 4921 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") on node \"crc\" DevicePath \"\"" Dec 10 12:59:56 crc kubenswrapper[4921]: I1210 12:59:56.099136 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "resource-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 12:59:56 crc kubenswrapper[4921]: I1210 12:59:56.122106 4921 scope.go:117] "RemoveContainer" containerID="692a4c4828dc74b1bfb948f58fab96ee6674030cb9009c72f30f9eae482eb682" Dec 10 12:59:56 crc kubenswrapper[4921]: I1210 12:59:56.139129 4921 scope.go:117] "RemoveContainer" containerID="0534394a39803e8a7555e29d0770b5ac7f9197a5f0e03bec4c5460d77fffdd14" Dec 10 12:59:56 crc kubenswrapper[4921]: I1210 12:59:56.156680 4921 scope.go:117] "RemoveContainer" containerID="bee74fc4c681cc10c5a460c807659272e393e19173109e82ef65371c5b363ce1" Dec 10 12:59:56 crc kubenswrapper[4921]: I1210 12:59:56.177011 4921 scope.go:117] "RemoveContainer" containerID="f534d6390920d177e185001b28f7ece42d82a0da922b4aaf174c271dbe975c50" Dec 10 12:59:56 crc kubenswrapper[4921]: E1210 12:59:56.177495 4921 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f534d6390920d177e185001b28f7ece42d82a0da922b4aaf174c271dbe975c50\": container with ID starting with f534d6390920d177e185001b28f7ece42d82a0da922b4aaf174c271dbe975c50 not found: ID does not exist" containerID="f534d6390920d177e185001b28f7ece42d82a0da922b4aaf174c271dbe975c50" Dec 10 12:59:56 crc kubenswrapper[4921]: I1210 12:59:56.177538 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f534d6390920d177e185001b28f7ece42d82a0da922b4aaf174c271dbe975c50"} err="failed to get container status \"f534d6390920d177e185001b28f7ece42d82a0da922b4aaf174c271dbe975c50\": rpc error: code = NotFound desc = could not find container \"f534d6390920d177e185001b28f7ece42d82a0da922b4aaf174c271dbe975c50\": container with ID starting with f534d6390920d177e185001b28f7ece42d82a0da922b4aaf174c271dbe975c50 not found: ID does not exist" Dec 10 12:59:56 crc kubenswrapper[4921]: I1210 12:59:56.177571 4921 scope.go:117] "RemoveContainer" containerID="1e5afbcb1ea81c3f9ec4152ef614a3f07ba1ded75c774c467e968f9c3ee72e33" Dec 10 12:59:56 crc kubenswrapper[4921]: E1210 12:59:56.177847 4921 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1e5afbcb1ea81c3f9ec4152ef614a3f07ba1ded75c774c467e968f9c3ee72e33\": container with ID starting with 1e5afbcb1ea81c3f9ec4152ef614a3f07ba1ded75c774c467e968f9c3ee72e33 not found: ID does not exist" containerID="1e5afbcb1ea81c3f9ec4152ef614a3f07ba1ded75c774c467e968f9c3ee72e33" Dec 10 12:59:56 crc kubenswrapper[4921]: I1210 12:59:56.177877 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1e5afbcb1ea81c3f9ec4152ef614a3f07ba1ded75c774c467e968f9c3ee72e33"} err="failed to get container status \"1e5afbcb1ea81c3f9ec4152ef614a3f07ba1ded75c774c467e968f9c3ee72e33\": rpc error: code = NotFound desc = could not find container \"1e5afbcb1ea81c3f9ec4152ef614a3f07ba1ded75c774c467e968f9c3ee72e33\": container with ID starting with 1e5afbcb1ea81c3f9ec4152ef614a3f07ba1ded75c774c467e968f9c3ee72e33 not found: ID does not exist" Dec 10 12:59:56 crc kubenswrapper[4921]: I1210 12:59:56.177894 4921 scope.go:117] "RemoveContainer" containerID="6eaca0cb438e61f0856ed7dc64256ccd02aee8dac014d1f5e9cd8aa180c736fb" Dec 10 12:59:56 crc kubenswrapper[4921]: E1210 12:59:56.178104 4921 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6eaca0cb438e61f0856ed7dc64256ccd02aee8dac014d1f5e9cd8aa180c736fb\": container with ID starting with 
6eaca0cb438e61f0856ed7dc64256ccd02aee8dac014d1f5e9cd8aa180c736fb not found: ID does not exist" containerID="6eaca0cb438e61f0856ed7dc64256ccd02aee8dac014d1f5e9cd8aa180c736fb" Dec 10 12:59:56 crc kubenswrapper[4921]: I1210 12:59:56.178130 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6eaca0cb438e61f0856ed7dc64256ccd02aee8dac014d1f5e9cd8aa180c736fb"} err="failed to get container status \"6eaca0cb438e61f0856ed7dc64256ccd02aee8dac014d1f5e9cd8aa180c736fb\": rpc error: code = NotFound desc = could not find container \"6eaca0cb438e61f0856ed7dc64256ccd02aee8dac014d1f5e9cd8aa180c736fb\": container with ID starting with 6eaca0cb438e61f0856ed7dc64256ccd02aee8dac014d1f5e9cd8aa180c736fb not found: ID does not exist" Dec 10 12:59:56 crc kubenswrapper[4921]: I1210 12:59:56.178148 4921 scope.go:117] "RemoveContainer" containerID="692a4c4828dc74b1bfb948f58fab96ee6674030cb9009c72f30f9eae482eb682" Dec 10 12:59:56 crc kubenswrapper[4921]: E1210 12:59:56.178566 4921 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"692a4c4828dc74b1bfb948f58fab96ee6674030cb9009c72f30f9eae482eb682\": container with ID starting with 692a4c4828dc74b1bfb948f58fab96ee6674030cb9009c72f30f9eae482eb682 not found: ID does not exist" containerID="692a4c4828dc74b1bfb948f58fab96ee6674030cb9009c72f30f9eae482eb682" Dec 10 12:59:56 crc kubenswrapper[4921]: I1210 12:59:56.178602 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"692a4c4828dc74b1bfb948f58fab96ee6674030cb9009c72f30f9eae482eb682"} err="failed to get container status \"692a4c4828dc74b1bfb948f58fab96ee6674030cb9009c72f30f9eae482eb682\": rpc error: code = NotFound desc = could not find container \"692a4c4828dc74b1bfb948f58fab96ee6674030cb9009c72f30f9eae482eb682\": container with ID starting with 692a4c4828dc74b1bfb948f58fab96ee6674030cb9009c72f30f9eae482eb682 not found: ID does not exist" Dec 10 12:59:56 crc kubenswrapper[4921]: I1210 12:59:56.178621 4921 scope.go:117] "RemoveContainer" containerID="0534394a39803e8a7555e29d0770b5ac7f9197a5f0e03bec4c5460d77fffdd14" Dec 10 12:59:56 crc kubenswrapper[4921]: E1210 12:59:56.178873 4921 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0534394a39803e8a7555e29d0770b5ac7f9197a5f0e03bec4c5460d77fffdd14\": container with ID starting with 0534394a39803e8a7555e29d0770b5ac7f9197a5f0e03bec4c5460d77fffdd14 not found: ID does not exist" containerID="0534394a39803e8a7555e29d0770b5ac7f9197a5f0e03bec4c5460d77fffdd14" Dec 10 12:59:56 crc kubenswrapper[4921]: I1210 12:59:56.178903 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0534394a39803e8a7555e29d0770b5ac7f9197a5f0e03bec4c5460d77fffdd14"} err="failed to get container status \"0534394a39803e8a7555e29d0770b5ac7f9197a5f0e03bec4c5460d77fffdd14\": rpc error: code = NotFound desc = could not find container \"0534394a39803e8a7555e29d0770b5ac7f9197a5f0e03bec4c5460d77fffdd14\": container with ID starting with 0534394a39803e8a7555e29d0770b5ac7f9197a5f0e03bec4c5460d77fffdd14 not found: ID does not exist" Dec 10 12:59:56 crc kubenswrapper[4921]: I1210 12:59:56.178924 4921 scope.go:117] "RemoveContainer" containerID="bee74fc4c681cc10c5a460c807659272e393e19173109e82ef65371c5b363ce1" Dec 10 12:59:56 crc kubenswrapper[4921]: E1210 12:59:56.179231 4921 log.go:32] "ContainerStatus from runtime service failed" err="rpc 
error: code = NotFound desc = could not find container \"bee74fc4c681cc10c5a460c807659272e393e19173109e82ef65371c5b363ce1\": container with ID starting with bee74fc4c681cc10c5a460c807659272e393e19173109e82ef65371c5b363ce1 not found: ID does not exist" containerID="bee74fc4c681cc10c5a460c807659272e393e19173109e82ef65371c5b363ce1" Dec 10 12:59:56 crc kubenswrapper[4921]: I1210 12:59:56.179291 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bee74fc4c681cc10c5a460c807659272e393e19173109e82ef65371c5b363ce1"} err="failed to get container status \"bee74fc4c681cc10c5a460c807659272e393e19173109e82ef65371c5b363ce1\": rpc error: code = NotFound desc = could not find container \"bee74fc4c681cc10c5a460c807659272e393e19173109e82ef65371c5b363ce1\": container with ID starting with bee74fc4c681cc10c5a460c807659272e393e19173109e82ef65371c5b363ce1 not found: ID does not exist" Dec 10 12:59:56 crc kubenswrapper[4921]: I1210 12:59:56.200922 4921 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 10 12:59:56 crc kubenswrapper[4921]: E1210 12:59:56.261720 4921 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.182:6443: connect: connection refused" interval="1.6s" Dec 10 12:59:57 crc kubenswrapper[4921]: I1210 12:59:57.198534 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4b27818a5e8e43d0dc095d08835c792" path="/var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/volumes" Dec 10 12:59:57 crc kubenswrapper[4921]: E1210 12:59:57.390319 4921 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.182:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.187fdc1c6400381b openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-10 12:59:53.447454747 +0000 UTC m=+190.663676671,LastTimestamp:2025-12-10 12:59:53.447454747 +0000 UTC m=+190.663676671,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 10 12:59:57 crc kubenswrapper[4921]: E1210 12:59:57.863099 4921 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.182:6443: connect: connection refused" interval="3.2s" Dec 10 12:59:58 crc kubenswrapper[4921]: I1210 12:59:58.091948 4921 status_manager.go:851] "Failed to get status for pod" podUID="13ae054d-506c-4469-baaa-c8cf6347b2f1" pod="openshift-marketplace/redhat-marketplace-54ksn" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-54ksn\": dial tcp 38.102.83.182:6443: connect: connection refused" Dec 10 12:59:58 crc kubenswrapper[4921]: I1210 12:59:58.095952 4921 status_manager.go:851] "Failed to get status for pod" podUID="2629ed61-02c7-450a-9d32-d3277755229f" pod="openshift-marketplace/certified-operators-wblqq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-wblqq\": dial tcp 38.102.83.182:6443: connect: connection refused" Dec 10 12:59:58 crc kubenswrapper[4921]: I1210 12:59:58.096312 4921 status_manager.go:851] "Failed to get status for pod" podUID="13ae054d-506c-4469-baaa-c8cf6347b2f1" pod="openshift-marketplace/redhat-marketplace-54ksn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-54ksn\": dial tcp 38.102.83.182:6443: connect: connection refused" Dec 10 12:59:58 crc kubenswrapper[4921]: I1210 12:59:58.096634 4921 status_manager.go:851] "Failed to get status for pod" podUID="dc9190a3-c02a-48f7-ab9f-8be8951f3f37" pod="openshift-marketplace/community-operators-d7btl" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-d7btl\": dial tcp 38.102.83.182:6443: connect: connection refused" Dec 10 12:59:58 crc kubenswrapper[4921]: I1210 12:59:58.096910 4921 status_manager.go:851] "Failed to get status for pod" podUID="f8e5f473-25ff-49f8-8c92-3201eaefae48" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.182:6443: connect: connection refused" Dec 10 12:59:58 crc kubenswrapper[4921]: I1210 12:59:58.097248 4921 status_manager.go:851] "Failed to get status for pod" podUID="2629ed61-02c7-450a-9d32-d3277755229f" pod="openshift-marketplace/certified-operators-wblqq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-wblqq\": dial tcp 38.102.83.182:6443: connect: connection refused" Dec 10 12:59:58 crc kubenswrapper[4921]: I1210 12:59:58.097580 4921 status_manager.go:851] "Failed to get status for pod" podUID="13ae054d-506c-4469-baaa-c8cf6347b2f1" pod="openshift-marketplace/redhat-marketplace-54ksn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-54ksn\": dial tcp 38.102.83.182:6443: connect: connection refused" Dec 10 12:59:58 crc kubenswrapper[4921]: I1210 12:59:58.097915 4921 status_manager.go:851] "Failed to get status for pod" podUID="dc9190a3-c02a-48f7-ab9f-8be8951f3f37" pod="openshift-marketplace/community-operators-d7btl" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-d7btl\": dial tcp 38.102.83.182:6443: connect: connection refused" Dec 10 12:59:58 crc kubenswrapper[4921]: I1210 12:59:58.098191 4921 status_manager.go:851] "Failed to get status for pod" podUID="f8e5f473-25ff-49f8-8c92-3201eaefae48" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.182:6443: connect: connection refused" Dec 10 13:00:00 crc kubenswrapper[4921]: E1210 13:00:00.252469 4921 desired_state_of_world_populator.go:312] "Error processing volume" err="error processing PVC openshift-image-registry/crc-image-registry-storage: failed to fetch PVC 
from API server: Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-image-registry/persistentvolumeclaims/crc-image-registry-storage\": dial tcp 38.102.83.182:6443: connect: connection refused" pod="openshift-image-registry/image-registry-697d97f7c8-556st" volumeName="registry-storage"
Dec 10 13:00:01 crc kubenswrapper[4921]: E1210 13:00:01.063895 4921 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.182:6443: connect: connection refused" interval="6.4s"
Dec 10 13:00:03 crc kubenswrapper[4921]: I1210 13:00:03.204077 4921 status_manager.go:851] "Failed to get status for pod" podUID="2629ed61-02c7-450a-9d32-d3277755229f" pod="openshift-marketplace/certified-operators-wblqq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-wblqq\": dial tcp 38.102.83.182:6443: connect: connection refused"
Dec 10 13:00:03 crc kubenswrapper[4921]: I1210 13:00:03.205449 4921 status_manager.go:851] "Failed to get status for pod" podUID="13ae054d-506c-4469-baaa-c8cf6347b2f1" pod="openshift-marketplace/redhat-marketplace-54ksn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-54ksn\": dial tcp 38.102.83.182:6443: connect: connection refused"
Dec 10 13:00:03 crc kubenswrapper[4921]: I1210 13:00:03.206137 4921 status_manager.go:851] "Failed to get status for pod" podUID="dc9190a3-c02a-48f7-ab9f-8be8951f3f37" pod="openshift-marketplace/community-operators-d7btl" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-d7btl\": dial tcp 38.102.83.182:6443: connect: connection refused"
Dec 10 13:00:03 crc kubenswrapper[4921]: I1210 13:00:03.206484 4921 status_manager.go:851] "Failed to get status for pod" podUID="f8e5f473-25ff-49f8-8c92-3201eaefae48" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.182:6443: connect: connection refused"
Dec 10 13:00:07 crc kubenswrapper[4921]: E1210 13:00:07.391204 4921 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.182:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.187fdc1c6400381b openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-10 12:59:53.447454747 +0000 UTC m=+190.663676671,LastTimestamp:2025-12-10 12:59:53.447454747 +0000 UTC m=+190.663676671,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}"
Dec 10 13:00:07 crc kubenswrapper[4921]: E1210 13:00:07.465849 4921 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.182:6443: connect: connection refused" interval="7s"
Dec 10 13:00:08 crc kubenswrapper[4921]: I1210 13:00:08.122471 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log"
Dec 10 13:00:08 crc kubenswrapper[4921]: I1210 13:00:08.122778 4921 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="5a4716beddbcd24e8418830aa5494cffffc21272e45e30bd15cfe58bfc07c543" exitCode=1
Dec 10 13:00:08 crc kubenswrapper[4921]: I1210 13:00:08.122808 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"5a4716beddbcd24e8418830aa5494cffffc21272e45e30bd15cfe58bfc07c543"}
Dec 10 13:00:08 crc kubenswrapper[4921]: I1210 13:00:08.123287 4921 scope.go:117] "RemoveContainer" containerID="5a4716beddbcd24e8418830aa5494cffffc21272e45e30bd15cfe58bfc07c543"
Dec 10 13:00:08 crc kubenswrapper[4921]: I1210 13:00:08.123904 4921 status_manager.go:851] "Failed to get status for pod" podUID="2629ed61-02c7-450a-9d32-d3277755229f" pod="openshift-marketplace/certified-operators-wblqq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-wblqq\": dial tcp 38.102.83.182:6443: connect: connection refused"
Dec 10 13:00:08 crc kubenswrapper[4921]: I1210 13:00:08.124467 4921 status_manager.go:851] "Failed to get status for pod" podUID="13ae054d-506c-4469-baaa-c8cf6347b2f1" pod="openshift-marketplace/redhat-marketplace-54ksn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-54ksn\": dial tcp 38.102.83.182:6443: connect: connection refused"
Dec 10 13:00:08 crc kubenswrapper[4921]: I1210 13:00:08.125094 4921 status_manager.go:851] "Failed to get status for pod" podUID="dc9190a3-c02a-48f7-ab9f-8be8951f3f37" pod="openshift-marketplace/community-operators-d7btl" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-d7btl\": dial tcp 38.102.83.182:6443: connect: connection refused"
Dec 10 13:00:08 crc kubenswrapper[4921]: I1210 13:00:08.125484 4921 status_manager.go:851] "Failed to get status for pod" podUID="f8e5f473-25ff-49f8-8c92-3201eaefae48" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.182:6443: connect: connection refused"
Dec 10 13:00:08 crc kubenswrapper[4921]: I1210 13:00:08.125861 4921 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.182:6443: connect: connection refused"
Dec 10 13:00:08 crc kubenswrapper[4921]: I1210 13:00:08.192687 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 10 13:00:08 crc kubenswrapper[4921]: I1210 13:00:08.193589 4921 status_manager.go:851] "Failed to get status for pod" podUID="13ae054d-506c-4469-baaa-c8cf6347b2f1" pod="openshift-marketplace/redhat-marketplace-54ksn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-54ksn\": dial tcp 38.102.83.182:6443: connect: connection refused"
Dec 10 13:00:08 crc kubenswrapper[4921]: I1210 13:00:08.193777 4921 status_manager.go:851] "Failed to get status for pod" podUID="2629ed61-02c7-450a-9d32-d3277755229f" pod="openshift-marketplace/certified-operators-wblqq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-wblqq\": dial tcp 38.102.83.182:6443: connect: connection refused"
Dec 10 13:00:08 crc kubenswrapper[4921]: I1210 13:00:08.194027 4921 status_manager.go:851] "Failed to get status for pod" podUID="dc9190a3-c02a-48f7-ab9f-8be8951f3f37" pod="openshift-marketplace/community-operators-d7btl" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-d7btl\": dial tcp 38.102.83.182:6443: connect: connection refused"
Dec 10 13:00:08 crc kubenswrapper[4921]: I1210 13:00:08.194246 4921 status_manager.go:851] "Failed to get status for pod" podUID="f8e5f473-25ff-49f8-8c92-3201eaefae48" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.182:6443: connect: connection refused"
Dec 10 13:00:08 crc kubenswrapper[4921]: I1210 13:00:08.194565 4921 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.182:6443: connect: connection refused"
Dec 10 13:00:08 crc kubenswrapper[4921]: I1210 13:00:08.213696 4921 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f57208b0-80bc-4c1b-bbab-9d2f858972f6"
Dec 10 13:00:08 crc kubenswrapper[4921]: I1210 13:00:08.213728 4921 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f57208b0-80bc-4c1b-bbab-9d2f858972f6"
Dec 10 13:00:08 crc kubenswrapper[4921]: E1210 13:00:08.214102 4921 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.182:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 10 13:00:08 crc kubenswrapper[4921]: I1210 13:00:08.214628 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 10 13:00:08 crc kubenswrapper[4921]: W1210 13:00:08.237338 4921 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod71bb4a3aecc4ba5b26c4b7318770ce13.slice/crio-1dc9b5f27de1cad1f710d2f0c8614614818860b0a06252a80daef77e17e3ef0f WatchSource:0}: Error finding container 1dc9b5f27de1cad1f710d2f0c8614614818860b0a06252a80daef77e17e3ef0f: Status 404 returned error can't find the container with id 1dc9b5f27de1cad1f710d2f0c8614614818860b0a06252a80daef77e17e3ef0f
Dec 10 13:00:09 crc kubenswrapper[4921]: I1210 13:00:09.127962 4921 generic.go:334] "Generic (PLEG): container finished" podID="71bb4a3aecc4ba5b26c4b7318770ce13" containerID="a2d0f900c345a5dc932c35c9bf73ddc0d61fd6e210473fa2b380c7b7852a0cb6" exitCode=0
Dec 10 13:00:09 crc kubenswrapper[4921]: I1210 13:00:09.128225 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerDied","Data":"a2d0f900c345a5dc932c35c9bf73ddc0d61fd6e210473fa2b380c7b7852a0cb6"}
Dec 10 13:00:09 crc kubenswrapper[4921]: I1210 13:00:09.128250 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"1dc9b5f27de1cad1f710d2f0c8614614818860b0a06252a80daef77e17e3ef0f"}
Dec 10 13:00:09 crc kubenswrapper[4921]: I1210 13:00:09.128481 4921 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f57208b0-80bc-4c1b-bbab-9d2f858972f6"
Dec 10 13:00:09 crc kubenswrapper[4921]: I1210 13:00:09.128492 4921 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f57208b0-80bc-4c1b-bbab-9d2f858972f6"
Dec 10 13:00:09 crc kubenswrapper[4921]: E1210 13:00:09.128875 4921 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.182:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 10 13:00:09 crc kubenswrapper[4921]: I1210 13:00:09.129022 4921 status_manager.go:851] "Failed to get status for pod" podUID="dc9190a3-c02a-48f7-ab9f-8be8951f3f37" pod="openshift-marketplace/community-operators-d7btl" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-d7btl\": dial tcp 38.102.83.182:6443: connect: connection refused"
Dec 10 13:00:09 crc kubenswrapper[4921]: I1210 13:00:09.129151 4921 status_manager.go:851] "Failed to get status for pod" podUID="f8e5f473-25ff-49f8-8c92-3201eaefae48" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.182:6443: connect: connection refused"
Dec 10 13:00:09 crc kubenswrapper[4921]: I1210 13:00:09.129292 4921 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.182:6443: connect: connection refused"
Dec 10 13:00:09 crc kubenswrapper[4921]: I1210 13:00:09.129485 4921 status_manager.go:851] "Failed to get status for pod" podUID="2629ed61-02c7-450a-9d32-d3277755229f" pod="openshift-marketplace/certified-operators-wblqq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-wblqq\": dial tcp 38.102.83.182:6443: connect: connection refused"
Dec 10 13:00:09 crc kubenswrapper[4921]: I1210 13:00:09.129713 4921 status_manager.go:851] "Failed to get status for pod" podUID="13ae054d-506c-4469-baaa-c8cf6347b2f1" pod="openshift-marketplace/redhat-marketplace-54ksn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-54ksn\": dial tcp 38.102.83.182:6443: connect: connection refused"
Dec 10 13:00:09 crc kubenswrapper[4921]: I1210 13:00:09.135452 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log"
Dec 10 13:00:09 crc kubenswrapper[4921]: I1210 13:00:09.135524 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"ac792ce44743bf4c65763d672a9e77eaaef6d7b2e9377ab7f71f9c051b69c2bc"}
Dec 10 13:00:09 crc kubenswrapper[4921]: I1210 13:00:09.139788 4921 status_manager.go:851] "Failed to get status for pod" podUID="2629ed61-02c7-450a-9d32-d3277755229f" pod="openshift-marketplace/certified-operators-wblqq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-wblqq\": dial tcp 38.102.83.182:6443: connect: connection refused"
Dec 10 13:00:09 crc kubenswrapper[4921]: I1210 13:00:09.140378 4921 status_manager.go:851] "Failed to get status for pod" podUID="13ae054d-506c-4469-baaa-c8cf6347b2f1" pod="openshift-marketplace/redhat-marketplace-54ksn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-54ksn\": dial tcp 38.102.83.182:6443: connect: connection refused"
Dec 10 13:00:09 crc kubenswrapper[4921]: I1210 13:00:09.140872 4921 status_manager.go:851] "Failed to get status for pod" podUID="dc9190a3-c02a-48f7-ab9f-8be8951f3f37" pod="openshift-marketplace/community-operators-d7btl" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-d7btl\": dial tcp 38.102.83.182:6443: connect: connection refused"
Dec 10 13:00:09 crc kubenswrapper[4921]: I1210 13:00:09.141193 4921 status_manager.go:851] "Failed to get status for pod" podUID="f8e5f473-25ff-49f8-8c92-3201eaefae48" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.182:6443: connect: connection refused"
Dec 10 13:00:09 crc kubenswrapper[4921]: I1210 13:00:09.141608 4921 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.182:6443: connect: connection refused"
Dec 10 13:00:09 crc kubenswrapper[4921]: I1210 13:00:09.740469 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-g7bns" podUID="00792261-f23c-4fc8-a67b-4b7753b692a1" containerName="oauth-openshift" containerID="cri-o://ef43b92ec772a305c7fcfa5028895d26ac96848f2424ca4f8c07dfb0e76de90b" gracePeriod=15
Dec 10 13:00:10 crc kubenswrapper[4921]: I1210 13:00:10.125174 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-g7bns"
Dec 10 13:00:10 crc kubenswrapper[4921]: I1210 13:00:10.165776 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"cc4fefc6c3bae9b6e8fa6ab7f51320e8ac46dbaa9ef0aa7a31b3f8cdcebcd43f"}
Dec 10 13:00:10 crc kubenswrapper[4921]: I1210 13:00:10.165834 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"f8303ba1a7720a3fc609b5fc1177c9b455b2c5618e8b8dbe9eeaccf3b9fd1dfa"}
Dec 10 13:00:10 crc kubenswrapper[4921]: I1210 13:00:10.165849 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"aa113948850b0ff4a2a113aedfb017192e5f48a34a39ee336dde6b2cf9c26efe"}
Dec 10 13:00:10 crc kubenswrapper[4921]: I1210 13:00:10.165859 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"ace57d412c1d069e1c7cb177de3e639f9028f606af1d383816e64aa87ca5d1d1"}
Dec 10 13:00:10 crc kubenswrapper[4921]: I1210 13:00:10.171419 4921 generic.go:334] "Generic (PLEG): container finished" podID="00792261-f23c-4fc8-a67b-4b7753b692a1" containerID="ef43b92ec772a305c7fcfa5028895d26ac96848f2424ca4f8c07dfb0e76de90b" exitCode=0
Dec 10 13:00:10 crc kubenswrapper[4921]: I1210 13:00:10.171477 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-g7bns"
Dec 10 13:00:10 crc kubenswrapper[4921]: I1210 13:00:10.171466 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-g7bns" event={"ID":"00792261-f23c-4fc8-a67b-4b7753b692a1","Type":"ContainerDied","Data":"ef43b92ec772a305c7fcfa5028895d26ac96848f2424ca4f8c07dfb0e76de90b"}
Dec 10 13:00:10 crc kubenswrapper[4921]: I1210 13:00:10.171609 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-g7bns" event={"ID":"00792261-f23c-4fc8-a67b-4b7753b692a1","Type":"ContainerDied","Data":"9c921519cb2d1983857f71f2fc56ec67bec1d99fc910d5a34c456ee8e32fe4d4"}
Dec 10 13:00:10 crc kubenswrapper[4921]: I1210 13:00:10.171657 4921 scope.go:117] "RemoveContainer" containerID="ef43b92ec772a305c7fcfa5028895d26ac96848f2424ca4f8c07dfb0e76de90b"
Dec 10 13:00:10 crc kubenswrapper[4921]: I1210 13:00:10.198531 4921 scope.go:117] "RemoveContainer" containerID="ef43b92ec772a305c7fcfa5028895d26ac96848f2424ca4f8c07dfb0e76de90b"
Dec 10 13:00:10 crc kubenswrapper[4921]: E1210 13:00:10.199090 4921 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ef43b92ec772a305c7fcfa5028895d26ac96848f2424ca4f8c07dfb0e76de90b\": container with ID starting with ef43b92ec772a305c7fcfa5028895d26ac96848f2424ca4f8c07dfb0e76de90b not found: ID does not exist" containerID="ef43b92ec772a305c7fcfa5028895d26ac96848f2424ca4f8c07dfb0e76de90b"
Dec 10 13:00:10 crc kubenswrapper[4921]: I1210 13:00:10.199175 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ef43b92ec772a305c7fcfa5028895d26ac96848f2424ca4f8c07dfb0e76de90b"} err="failed to get container status \"ef43b92ec772a305c7fcfa5028895d26ac96848f2424ca4f8c07dfb0e76de90b\": rpc error: code = NotFound desc = could not find container \"ef43b92ec772a305c7fcfa5028895d26ac96848f2424ca4f8c07dfb0e76de90b\": container with ID starting with ef43b92ec772a305c7fcfa5028895d26ac96848f2424ca4f8c07dfb0e76de90b not found: ID does not exist"
Dec 10 13:00:10 crc kubenswrapper[4921]: I1210 13:00:10.281512 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/00792261-f23c-4fc8-a67b-4b7753b692a1-v4-0-config-system-cliconfig\") pod \"00792261-f23c-4fc8-a67b-4b7753b692a1\" (UID: \"00792261-f23c-4fc8-a67b-4b7753b692a1\") "
Dec 10 13:00:10 crc kubenswrapper[4921]: I1210 13:00:10.281578 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/00792261-f23c-4fc8-a67b-4b7753b692a1-v4-0-config-user-idp-0-file-data\") pod \"00792261-f23c-4fc8-a67b-4b7753b692a1\" (UID: \"00792261-f23c-4fc8-a67b-4b7753b692a1\") "
Dec 10 13:00:10 crc kubenswrapper[4921]: I1210 13:00:10.281598 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/00792261-f23c-4fc8-a67b-4b7753b692a1-audit-dir\") pod \"00792261-f23c-4fc8-a67b-4b7753b692a1\" (UID: \"00792261-f23c-4fc8-a67b-4b7753b692a1\") "
Dec 10 13:00:10 crc kubenswrapper[4921]: I1210 13:00:10.281624 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/00792261-f23c-4fc8-a67b-4b7753b692a1-v4-0-config-system-ocp-branding-template\") pod \"00792261-f23c-4fc8-a67b-4b7753b692a1\" (UID: \"00792261-f23c-4fc8-a67b-4b7753b692a1\") "
Dec 10 13:00:10 crc kubenswrapper[4921]: I1210 13:00:10.281658 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/00792261-f23c-4fc8-a67b-4b7753b692a1-v4-0-config-system-router-certs\") pod \"00792261-f23c-4fc8-a67b-4b7753b692a1\" (UID: \"00792261-f23c-4fc8-a67b-4b7753b692a1\") "
Dec 10 13:00:10 crc kubenswrapper[4921]: I1210 13:00:10.281674 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/00792261-f23c-4fc8-a67b-4b7753b692a1-v4-0-config-system-trusted-ca-bundle\") pod \"00792261-f23c-4fc8-a67b-4b7753b692a1\" (UID: \"00792261-f23c-4fc8-a67b-4b7753b692a1\") "
Dec 10 13:00:10 crc kubenswrapper[4921]: I1210 13:00:10.281719 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/00792261-f23c-4fc8-a67b-4b7753b692a1-v4-0-config-user-template-provider-selection\") pod \"00792261-f23c-4fc8-a67b-4b7753b692a1\" (UID: \"00792261-f23c-4fc8-a67b-4b7753b692a1\") "
Dec 10 13:00:10 crc kubenswrapper[4921]: I1210 13:00:10.281751 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/00792261-f23c-4fc8-a67b-4b7753b692a1-audit-policies\") pod \"00792261-f23c-4fc8-a67b-4b7753b692a1\" (UID: \"00792261-f23c-4fc8-a67b-4b7753b692a1\") "
Dec 10 13:00:10 crc kubenswrapper[4921]: I1210 13:00:10.281776 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/00792261-f23c-4fc8-a67b-4b7753b692a1-v4-0-config-system-session\") pod \"00792261-f23c-4fc8-a67b-4b7753b692a1\" (UID: \"00792261-f23c-4fc8-a67b-4b7753b692a1\") "
Dec 10 13:00:10 crc kubenswrapper[4921]: I1210 13:00:10.281794 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/00792261-f23c-4fc8-a67b-4b7753b692a1-v4-0-config-user-template-error\") pod \"00792261-f23c-4fc8-a67b-4b7753b692a1\" (UID: \"00792261-f23c-4fc8-a67b-4b7753b692a1\") "
Dec 10 13:00:10 crc kubenswrapper[4921]: I1210 13:00:10.281810 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/00792261-f23c-4fc8-a67b-4b7753b692a1-v4-0-config-system-service-ca\") pod \"00792261-f23c-4fc8-a67b-4b7753b692a1\" (UID: \"00792261-f23c-4fc8-a67b-4b7753b692a1\") "
Dec 10 13:00:10 crc kubenswrapper[4921]: I1210 13:00:10.281827 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/00792261-f23c-4fc8-a67b-4b7753b692a1-v4-0-config-user-template-login\") pod \"00792261-f23c-4fc8-a67b-4b7753b692a1\" (UID: \"00792261-f23c-4fc8-a67b-4b7753b692a1\") "
Dec 10 13:00:10 crc kubenswrapper[4921]: I1210 13:00:10.281853 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9fks2\" (UniqueName: \"kubernetes.io/projected/00792261-f23c-4fc8-a67b-4b7753b692a1-kube-api-access-9fks2\") pod \"00792261-f23c-4fc8-a67b-4b7753b692a1\" (UID: \"00792261-f23c-4fc8-a67b-4b7753b692a1\") "
Dec 10 13:00:10 crc kubenswrapper[4921]: I1210 13:00:10.281873 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/00792261-f23c-4fc8-a67b-4b7753b692a1-v4-0-config-system-serving-cert\") pod \"00792261-f23c-4fc8-a67b-4b7753b692a1\" (UID: \"00792261-f23c-4fc8-a67b-4b7753b692a1\") "
Dec 10 13:00:10 crc kubenswrapper[4921]: I1210 13:00:10.282557 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/00792261-f23c-4fc8-a67b-4b7753b692a1-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "00792261-f23c-4fc8-a67b-4b7753b692a1" (UID: "00792261-f23c-4fc8-a67b-4b7753b692a1"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 10 13:00:10 crc kubenswrapper[4921]: I1210 13:00:10.283008 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/00792261-f23c-4fc8-a67b-4b7753b692a1-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "00792261-f23c-4fc8-a67b-4b7753b692a1" (UID: "00792261-f23c-4fc8-a67b-4b7753b692a1"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 10 13:00:10 crc kubenswrapper[4921]: I1210 13:00:10.283293 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/00792261-f23c-4fc8-a67b-4b7753b692a1-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "00792261-f23c-4fc8-a67b-4b7753b692a1" (UID: "00792261-f23c-4fc8-a67b-4b7753b692a1"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 10 13:00:10 crc kubenswrapper[4921]: I1210 13:00:10.287768 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/00792261-f23c-4fc8-a67b-4b7753b692a1-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "00792261-f23c-4fc8-a67b-4b7753b692a1" (UID: "00792261-f23c-4fc8-a67b-4b7753b692a1"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 10 13:00:10 crc kubenswrapper[4921]: I1210 13:00:10.288114 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/00792261-f23c-4fc8-a67b-4b7753b692a1-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "00792261-f23c-4fc8-a67b-4b7753b692a1" (UID: "00792261-f23c-4fc8-a67b-4b7753b692a1"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 10 13:00:10 crc kubenswrapper[4921]: I1210 13:00:10.302607 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/00792261-f23c-4fc8-a67b-4b7753b692a1-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "00792261-f23c-4fc8-a67b-4b7753b692a1" (UID: "00792261-f23c-4fc8-a67b-4b7753b692a1"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 10 13:00:10 crc kubenswrapper[4921]: I1210 13:00:10.302897 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/00792261-f23c-4fc8-a67b-4b7753b692a1-kube-api-access-9fks2" (OuterVolumeSpecName: "kube-api-access-9fks2") pod "00792261-f23c-4fc8-a67b-4b7753b692a1" (UID: "00792261-f23c-4fc8-a67b-4b7753b692a1"). InnerVolumeSpecName "kube-api-access-9fks2". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 10 13:00:10 crc kubenswrapper[4921]: I1210 13:00:10.313958 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/00792261-f23c-4fc8-a67b-4b7753b692a1-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "00792261-f23c-4fc8-a67b-4b7753b692a1" (UID: "00792261-f23c-4fc8-a67b-4b7753b692a1"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 10 13:00:10 crc kubenswrapper[4921]: I1210 13:00:10.315511 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/00792261-f23c-4fc8-a67b-4b7753b692a1-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "00792261-f23c-4fc8-a67b-4b7753b692a1" (UID: "00792261-f23c-4fc8-a67b-4b7753b692a1"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 10 13:00:10 crc kubenswrapper[4921]: I1210 13:00:10.315830 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/00792261-f23c-4fc8-a67b-4b7753b692a1-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "00792261-f23c-4fc8-a67b-4b7753b692a1" (UID: "00792261-f23c-4fc8-a67b-4b7753b692a1"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 10 13:00:10 crc kubenswrapper[4921]: I1210 13:00:10.319266 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/00792261-f23c-4fc8-a67b-4b7753b692a1-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "00792261-f23c-4fc8-a67b-4b7753b692a1" (UID: "00792261-f23c-4fc8-a67b-4b7753b692a1"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 10 13:00:10 crc kubenswrapper[4921]: I1210 13:00:10.319407 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/00792261-f23c-4fc8-a67b-4b7753b692a1-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "00792261-f23c-4fc8-a67b-4b7753b692a1" (UID: "00792261-f23c-4fc8-a67b-4b7753b692a1"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 10 13:00:10 crc kubenswrapper[4921]: I1210 13:00:10.319488 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/00792261-f23c-4fc8-a67b-4b7753b692a1-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "00792261-f23c-4fc8-a67b-4b7753b692a1" (UID: "00792261-f23c-4fc8-a67b-4b7753b692a1"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 10 13:00:10 crc kubenswrapper[4921]: I1210 13:00:10.321340 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/00792261-f23c-4fc8-a67b-4b7753b692a1-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "00792261-f23c-4fc8-a67b-4b7753b692a1" (UID: "00792261-f23c-4fc8-a67b-4b7753b692a1"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 10 13:00:10 crc kubenswrapper[4921]: I1210 13:00:10.383323 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9fks2\" (UniqueName: \"kubernetes.io/projected/00792261-f23c-4fc8-a67b-4b7753b692a1-kube-api-access-9fks2\") on node \"crc\" DevicePath \"\""
Dec 10 13:00:10 crc kubenswrapper[4921]: I1210 13:00:10.383357 4921 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/00792261-f23c-4fc8-a67b-4b7753b692a1-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\""
Dec 10 13:00:10 crc kubenswrapper[4921]: I1210 13:00:10.383371 4921 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/00792261-f23c-4fc8-a67b-4b7753b692a1-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\""
Dec 10 13:00:10 crc kubenswrapper[4921]: I1210 13:00:10.383381 4921 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/00792261-f23c-4fc8-a67b-4b7753b692a1-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\""
Dec 10 13:00:10 crc kubenswrapper[4921]: I1210 13:00:10.383394 4921 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/00792261-f23c-4fc8-a67b-4b7753b692a1-audit-dir\") on node \"crc\" DevicePath \"\""
Dec 10 13:00:10 crc kubenswrapper[4921]: I1210 13:00:10.383422 4921 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/00792261-f23c-4fc8-a67b-4b7753b692a1-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\""
Dec 10 13:00:10 crc kubenswrapper[4921]: I1210 13:00:10.383432 4921 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/00792261-f23c-4fc8-a67b-4b7753b692a1-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\""
Dec 10 13:00:10 crc kubenswrapper[4921]: I1210 13:00:10.383440 4921 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/00792261-f23c-4fc8-a67b-4b7753b692a1-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 10 13:00:10 crc kubenswrapper[4921]: I1210 13:00:10.383449 4921 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/00792261-f23c-4fc8-a67b-4b7753b692a1-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\""
Dec 10 13:00:10 crc kubenswrapper[4921]: I1210 13:00:10.383459 4921 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/00792261-f23c-4fc8-a67b-4b7753b692a1-audit-policies\") on node \"crc\" DevicePath \"\""
Dec 10 13:00:10 crc kubenswrapper[4921]: I1210 13:00:10.383467 4921 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/00792261-f23c-4fc8-a67b-4b7753b692a1-v4-0-config-system-session\") on node \"crc\" DevicePath \"\""
Dec 10 13:00:10 crc kubenswrapper[4921]: I1210 13:00:10.383476 4921 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/00792261-f23c-4fc8-a67b-4b7753b692a1-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\""
Dec 10 13:00:10 crc kubenswrapper[4921]: I1210 13:00:10.383483 4921 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/00792261-f23c-4fc8-a67b-4b7753b692a1-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\""
Dec 10 13:00:10 crc kubenswrapper[4921]: I1210 13:00:10.383492 4921 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/00792261-f23c-4fc8-a67b-4b7753b692a1-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\""
Dec 10 13:00:11 crc kubenswrapper[4921]: I1210 13:00:11.179947 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"9758bbb98593012282018c7a6d076cfee3ad0317b1145e0af5f6b5ced0f2b851"}
Dec 10 13:00:11 crc kubenswrapper[4921]: I1210 13:00:11.180055 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 10 13:00:11 crc kubenswrapper[4921]: I1210 13:00:11.180183 4921 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f57208b0-80bc-4c1b-bbab-9d2f858972f6"
Dec 10 13:00:11 crc kubenswrapper[4921]: I1210 13:00:11.180211 4921 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f57208b0-80bc-4c1b-bbab-9d2f858972f6"
Dec 10 13:00:13 crc kubenswrapper[4921]: I1210 13:00:13.215203 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 10 13:00:13 crc kubenswrapper[4921]: I1210 13:00:13.215576 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 10 13:00:13 crc kubenswrapper[4921]: I1210 13:00:13.223843 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 10 13:00:16 crc kubenswrapper[4921]: I1210 13:00:16.205787 4921 kubelet.go:1914] "Deleted mirror pod because it is outdated" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 10 13:00:16 crc kubenswrapper[4921]: I1210 13:00:16.463176 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 10 13:00:16 crc kubenswrapper[4921]: I1210 13:00:16.710733 4921 patch_prober.go:28] interesting pod/machine-config-daemon-vn2n6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 10 13:00:16 crc kubenswrapper[4921]: I1210 13:00:16.710810 4921 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" podUID="354355f7-6630-49a8-bdc5-5e875feecb7f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 10 13:00:16 crc kubenswrapper[4921]: I1210 13:00:16.710862 4921 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6"
Dec 10 13:00:16 crc kubenswrapper[4921]: I1210 13:00:16.711534 4921 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"27975eaa70887a1e6ec3bc21ce170bbe5dfe5a05172264be8c8bd343aea02998"} pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 10 13:00:16 crc kubenswrapper[4921]: I1210 13:00:16.711602 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" podUID="354355f7-6630-49a8-bdc5-5e875feecb7f" containerName="machine-config-daemon" containerID="cri-o://27975eaa70887a1e6ec3bc21ce170bbe5dfe5a05172264be8c8bd343aea02998" gracePeriod=600
Dec 10 13:00:16 crc kubenswrapper[4921]: I1210 13:00:16.739573 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 10 13:00:16 crc kubenswrapper[4921]: I1210 13:00:16.739775 4921 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/kube-controller-manager namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" start-of-body=
Dec 10 13:00:16 crc kubenswrapper[4921]: I1210 13:00:16.739831 4921 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" probeResult="failure" output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused"
Dec 10 13:00:17 crc kubenswrapper[4921]: I1210 13:00:17.213342 4921 generic.go:334] "Generic (PLEG): container finished" podID="354355f7-6630-49a8-bdc5-5e875feecb7f" containerID="27975eaa70887a1e6ec3bc21ce170bbe5dfe5a05172264be8c8bd343aea02998" exitCode=0
Dec 10 13:00:17 crc kubenswrapper[4921]: I1210 13:00:17.213433 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" event={"ID":"354355f7-6630-49a8-bdc5-5e875feecb7f","Type":"ContainerDied","Data":"27975eaa70887a1e6ec3bc21ce170bbe5dfe5a05172264be8c8bd343aea02998"}
Dec 10 13:00:17 crc kubenswrapper[4921]: I1210 13:00:17.213998 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" event={"ID":"354355f7-6630-49a8-bdc5-5e875feecb7f","Type":"ContainerStarted","Data":"d291bc9e31f368d19d6f0d518943c401b76d569a551a18fee6a5eac2aeea537d"}
Dec 10 13:00:17 crc kubenswrapper[4921]: I1210 13:00:17.214701 4921 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f57208b0-80bc-4c1b-bbab-9d2f858972f6"
Dec 10 13:00:17 crc kubenswrapper[4921]: I1210 13:00:17.214715 4921 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f57208b0-80bc-4c1b-bbab-9d2f858972f6"
Dec 10 13:00:17 crc kubenswrapper[4921]: I1210 13:00:17.222841 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 10 13:00:17 crc kubenswrapper[4921]: I1210 13:00:17.233942 4921 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="f41916f1-8015-4f9f-8833-9a9766cf9886"
Dec 10 13:00:18 crc kubenswrapper[4921]: I1210 13:00:18.219221 4921 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f57208b0-80bc-4c1b-bbab-9d2f858972f6"
Dec 10 13:00:18 crc kubenswrapper[4921]: I1210 13:00:18.219585 4921 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f57208b0-80bc-4c1b-bbab-9d2f858972f6"
Dec 10 13:00:23 crc kubenswrapper[4921]: I1210 13:00:23.209182 4921 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="f41916f1-8015-4f9f-8833-9a9766cf9886"
Dec 10 13:00:26 crc kubenswrapper[4921]: I1210 13:00:26.043793 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle"
Dec 10 13:00:26 crc kubenswrapper[4921]: I1210 13:00:26.118827 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert"
Dec 10 13:00:26 crc kubenswrapper[4921]: I1210 13:00:26.237184 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script"
Dec 10 13:00:26 crc kubenswrapper[4921]: I1210 13:00:26.410726 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config"
Dec 10 13:00:26 crc kubenswrapper[4921]: I1210 13:00:26.598127 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r"
Dec 10 13:00:26 crc kubenswrapper[4921]: I1210 13:00:26.739355 4921 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/kube-controller-manager namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" start-of-body=
Dec 10 13:00:26 crc kubenswrapper[4921]: I1210 13:00:26.739496 4921 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" probeResult="failure" output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused"
Dec 10 13:00:26 crc kubenswrapper[4921]: I1210 13:00:26.947890 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff"
Dec 10 13:00:26 crc kubenswrapper[4921]: I1210 13:00:26.949150 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1"
Dec 10 13:00:27 crc kubenswrapper[4921]: I1210 13:00:27.492890 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls"
Dec 10 13:00:27 crc kubenswrapper[4921]: I1210 13:00:27.545947 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert"
Dec 10 13:00:27 crc kubenswrapper[4921]: I1210 13:00:27.644140 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert"
Dec 10 13:00:27 crc kubenswrapper[4921]: I1210 13:00:27.691976 4921 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160
Dec 10 13:00:27 crc kubenswrapper[4921]: I1210 13:00:27.822519 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls"
Dec 10 13:00:27 crc kubenswrapper[4921]: I1210 13:00:27.913757 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt"
Dec 10 13:00:27 crc kubenswrapper[4921]: I1210 13:00:27.955969 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt"
Dec 10 13:00:28 crc kubenswrapper[4921]: I1210 13:00:28.349546 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt"
Dec 10 13:00:28 crc kubenswrapper[4921]: I1210 13:00:28.391366 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert"
Dec 10 13:00:28 crc kubenswrapper[4921]: I1210 13:00:28.447715 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca"
Dec 10 13:00:28 crc kubenswrapper[4921]: I1210 13:00:28.504146 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt"
Dec 10 13:00:28 crc kubenswrapper[4921]: I1210 13:00:28.527870 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq"
Dec 10 13:00:28 crc kubenswrapper[4921]: I1210 13:00:28.589467 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt"
Dec 10 13:00:28 crc kubenswrapper[4921]: I1210 13:00:28.601875 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources"
Dec 10 13:00:28 crc kubenswrapper[4921]: I1210 13:00:28.608042 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist"
Dec 10 13:00:28 crc kubenswrapper[4921]: I1210 13:00:28.722478 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt"
Dec 10 13:00:28 crc kubenswrapper[4921]: I1210 13:00:28.743116 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle"
Dec 10 13:00:28 crc kubenswrapper[4921]: I1210 13:00:28.743633 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z"
Dec 10 13:00:28 crc kubenswrapper[4921]: I1210 13:00:28.757607 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt"
Dec 10 13:00:28 crc kubenswrapper[4921]: I1210 13:00:28.829155 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config"
Dec 10 13:00:28 crc kubenswrapper[4921]: I1210 13:00:28.847827 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls"
Dec 10 13:00:28 crc kubenswrapper[4921]: I1210 13:00:28.945922 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt"
Dec 10 13:00:28 crc kubenswrapper[4921]: I1210 13:00:28.987669 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls"
Dec 10 13:00:29 crc kubenswrapper[4921]: I1210 13:00:29.030860 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2"
Dec 10 13:00:29 crc kubenswrapper[4921]: I1210 13:00:29.061932 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert"
Dec 10 13:00:29 crc kubenswrapper[4921]: I1210 13:00:29.162335 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl"
Dec 10 13:00:29 crc kubenswrapper[4921]: I1210 13:00:29.192706 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt"
Dec 10 13:00:29 crc kubenswrapper[4921]: I1210 13:00:29.223451 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c"
Dec 10 13:00:29 crc kubenswrapper[4921]: I1210 13:00:29.265141 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt"
Dec 10 13:00:29 crc kubenswrapper[4921]: I1210 13:00:29.271581 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt"
Dec 10 13:00:29 crc kubenswrapper[4921]: I1210 13:00:29.288757 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert"
Dec 10 13:00:29 crc kubenswrapper[4921]: I1210 13:00:29.323149 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca"
Dec 10 13:00:29 crc kubenswrapper[4921]: I1210 13:00:29.372735 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd"
Dec 10 13:00:29 crc kubenswrapper[4921]: I1210 13:00:29.388199 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt"
Dec 10 13:00:29 crc kubenswrapper[4921]: I1210 13:00:29.508559 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt"
Dec 10 13:00:29 crc kubenswrapper[4921]: I1210 13:00:29.674849 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert"
Dec 10 13:00:29 crc kubenswrapper[4921]: I1210 13:00:29.974157 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config"
Dec 10 13:00:30 crc kubenswrapper[4921]: I1210 13:00:30.063281 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert"
Dec 10 13:00:30 crc kubenswrapper[4921]: I1210 13:00:30.114979 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config"
Dec 10 13:00:30 crc kubenswrapper[4921]: I1210 13:00:30.217333 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt"
Dec 10 13:00:30 crc kubenswrapper[4921]: I1210 13:00:30.398817 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default"
Dec 10 13:00:30 crc kubenswrapper[4921]: I1210 13:00:30.445609 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt"
Dec 10 13:00:30 crc kubenswrapper[4921]: I1210 13:00:30.679519 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client"
Dec 10 13:00:30 crc kubenswrapper[4921]: I1210 13:00:30.859226 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt"
Dec 10 13:00:30 crc kubenswrapper[4921]: I1210 13:00:30.860543 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5"
Dec 10 13:00:30 crc kubenswrapper[4921]: I1210 13:00:30.903157 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt"
Dec 10 13:00:30 crc kubenswrapper[4921]: I1210 13:00:30.984305 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret"
Dec 10 13:00:30 crc kubenswrapper[4921]: I1210 13:00:30.991171 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt"
Dec 10 13:00:31 crc kubenswrapper[4921]: I1210 13:00:31.005905 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt"
Dec 10 13:00:31 crc kubenswrapper[4921]: I1210 13:00:31.019592 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c"
Dec 10 13:00:31 crc kubenswrapper[4921]: I1210 13:00:31.073343 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt"
Dec 10 13:00:31 crc kubenswrapper[4921]: I1210 13:00:31.086515 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config"
Dec 10 13:00:31 crc kubenswrapper[4921]: I1210 13:00:31.099791 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt"
Dec 10 13:00:31 crc kubenswrapper[4921]: I1210 13:00:31.115020 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj"
Dec 10 13:00:31 crc kubenswrapper[4921]: I1210 13:00:31.289279 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config"
Dec 10 13:00:31 crc kubenswrapper[4921]: I1210 13:00:31.298249 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert"
Dec 10 13:00:31 crc kubenswrapper[4921]: I1210 13:00:31.298542 4921 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k"
Dec 10 13:00:31 crc kubenswrapper[4921]: I1210 13:00:31.308078 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert"
Dec 10 13:00:31 crc kubenswrapper[4921]: I1210 13:00:31.426305 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt"
Dec 10 13:00:31 crc kubenswrapper[4921]: I1210 13:00:31.558320 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c"
Dec 10 13:00:31 crc kubenswrapper[4921]: I1210 13:00:31.568938 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr"
Dec 10 13:00:31 crc kubenswrapper[4921]: I1210 13:00:31.575488 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt"
Dec 10 13:00:31 crc kubenswrapper[4921]: I1210 13:00:31.607641 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg"
Dec 10 13:00:31 crc kubenswrapper[4921]: I1210 13:00:31.799845 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr"
Dec 10 13:00:31 crc kubenswrapper[4921]: I1210 13:00:31.813679 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert"
Dec 10 13:00:31 crc kubenswrapper[4921]: I1210 13:00:31.874038 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt"
Dec 10 13:00:31 crc kubenswrapper[4921]: I1210 13:00:31.888091 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images"
Dec 10 13:00:31 crc kubenswrapper[4921]: I1210 13:00:31.895808 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt"
Dec 10 13:00:31 crc kubenswrapper[4921]: I1210 13:00:31.949513 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq"
Dec 10 13:00:31 crc kubenswrapper[4921]: I1210 13:00:31.956530 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default"
Dec 10 13:00:32 crc kubenswrapper[4921]: I1210 13:00:32.002288 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt"
Dec 10 13:00:32 crc kubenswrapper[4921]: I1210 13:00:32.151458 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw"
Dec 10 13:00:32 crc kubenswrapper[4921]: I1210 13:00:32.157764 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle"
Dec 10 13:00:32 crc kubenswrapper[4921]: I1210 13:00:32.159456 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt"
Dec 10 13:00:32 crc kubenswrapper[4921]: I1210 13:00:32.300680 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw"
Dec 10 13:00:32 crc kubenswrapper[4921]: I1210 13:00:32.332042 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl"
Dec 10 13:00:32 crc kubenswrapper[4921]: I1210 13:00:32.345670 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca"
Dec 10 13:00:32 crc kubenswrapper[4921]: I1210 13:00:32.380775 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle"
Dec 10 13:00:32 crc kubenswrapper[4921]: I1210 13:00:32.417648 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls"
Dec 10 13:00:32 crc kubenswrapper[4921]: I1210 13:00:32.427562 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw"
Dec 10 13:00:32 crc kubenswrapper[4921]: I1210 13:00:32.503703 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf"
Dec 10 13:00:32 crc kubenswrapper[4921]: I1210 13:00:32.528645 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt"
Dec 10 13:00:32 crc kubenswrapper[4921]: I1210 13:00:32.683339 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets"
Dec 10 13:00:32 crc kubenswrapper[4921]: I1210 13:00:32.780549 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert"
Dec 10 13:00:32 crc kubenswrapper[4921]: I1210 13:00:32.791880 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt"
Dec 10 13:00:32 crc kubenswrapper[4921]: I1210 13:00:32.856531 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87"
Dec 10 13:00:32 crc kubenswrapper[4921]: I1210 13:00:32.895728 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca"
Dec 10 13:00:32 crc kubenswrapper[4921]: I1210 13:00:32.906765 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config"
Dec 10 13:00:33 crc kubenswrapper[4921]: I1210 13:00:33.026487 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx"
Dec 10 13:00:33 crc kubenswrapper[4921]: I1210 13:00:33.061409 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt"
Dec 10 13:00:33 crc kubenswrapper[4921]: I1210 13:00:33.063179 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt"
Dec 10 13:00:33 crc kubenswrapper[4921]: I1210 13:00:33.097164 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p"
Dec 10 13:00:33 crc kubenswrapper[4921]: I1210 13:00:33.133339 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt"
Dec 10 13:00:33 crc kubenswrapper[4921]: I1210 13:00:33.148762 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4"
Dec 10 13:00:33 crc kubenswrapper[4921]: I1210 13:00:33.235463 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config"
Dec 10 13:00:33 crc kubenswrapper[4921]: I1210 13:00:33.434324 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert"
Dec 10 13:00:33 crc kubenswrapper[4921]: I1210 13:00:33.567489 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt"
Dec 10 13:00:33 crc kubenswrapper[4921]: I1210 13:00:33.573081 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt"
Dec 10 13:00:33 crc kubenswrapper[4921]: I1210 13:00:33.584258 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle"
Dec 10 13:00:33 crc kubenswrapper[4921]: I1210 13:00:33.618941 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token"
Dec 10 13:00:33 crc kubenswrapper[4921]: I1210 13:00:33.621494 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca"
Dec 10 13:00:33 crc kubenswrapper[4921]: I1210 13:00:33.729967 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg"
Dec 10 13:00:33 crc kubenswrapper[4921]: I1210 13:00:33.781335 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib"
Dec 10 13:00:33 crc kubenswrapper[4921]: I1210 13:00:33.781982 4921 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160
Dec 10 13:00:33 crc kubenswrapper[4921]: I1210 13:00:33.786755 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates"
Dec 10 13:00:33 crc kubenswrapper[4921]: I1210 13:00:33.880478 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt"
Dec 10 13:00:33 crc kubenswrapper[4921]: I1210 13:00:33.965211 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4"
Dec 10 13:00:33 crc kubenswrapper[4921]: I1210 13:00:33.983025 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt"
Dec 10 13:00:34 crc kubenswrapper[4921]: I1210 13:00:34.124074 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret"
Dec 10 13:00:34 crc kubenswrapper[4921]: I1210 13:00:34.252376 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt"
Dec 10 13:00:34 crc kubenswrapper[4921]: I1210 13:00:34.354270 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt"
Dec 10 13:00:34 crc kubenswrapper[4921]: I1210 13:00:34.378046 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls"
Dec 10 13:00:34 crc kubenswrapper[4921]: I1210 13:00:34.414545 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt"
Dec 10 13:00:34 crc kubenswrapper[4921]: I1210 13:00:34.441727 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config"
Dec 10 13:00:34 crc kubenswrapper[4921]: I1210 13:00:34.539080 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt"
Dec 10 13:00:34 crc kubenswrapper[4921]: I1210 13:00:34.588956 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt"
Dec 10 13:00:34 crc kubenswrapper[4921]: I1210 13:00:34.666929 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt"
Dec 10 13:00:34 crc kubenswrapper[4921]: I1210 13:00:34.737290 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt"
Dec 10 13:00:34 crc kubenswrapper[4921]: I1210 13:00:34.753025 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle"
Dec 10 13:00:34 crc kubenswrapper[4921]: I1210 13:00:34.763005 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert"
Dec 10 13:00:34 crc kubenswrapper[4921]: I1210 13:00:34.834820 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt"
Dec 10 13:00:34 crc kubenswrapper[4921]: I1210 13:00:34.857613 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy"
Dec 10 13:00:35 crc kubenswrapper[4921]: I1210 13:00:35.087210 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt"
Dec 10 13:00:35 crc kubenswrapper[4921]: I1210 13:00:35.096693 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides"
Dec 10 13:00:35 crc kubenswrapper[4921]: I1210 13:00:35.179010 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz"
Dec 10 13:00:35 crc kubenswrapper[4921]: I1210 13:00:35.230119 4921 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160
Dec 10 13:00:35 crc kubenswrapper[4921]: I1210 13:00:35.333613 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle"
Dec 10 13:00:35 crc kubenswrapper[4921]: I1210 13:00:35.377107 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt"
Dec 10 13:00:35 crc kubenswrapper[4921]: I1210 13:00:35.379283 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert"
Dec 10 13:00:35 crc kubenswrapper[4921]: I1210 13:00:35.441561 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx"
Dec 10 13:00:35 crc kubenswrapper[4921]: I1210 13:00:35.443892 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client"
Dec 10 13:00:35 crc kubenswrapper[4921]: I1210 13:00:35.458522 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh"
Dec 10 13:00:35 crc kubenswrapper[4921]: I1210 13:00:35.709921 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config"
Dec 10 13:00:35 crc kubenswrapper[4921]: I1210
13:00:35.756003 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Dec 10 13:00:35 crc kubenswrapper[4921]: I1210 13:00:35.797288 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Dec 10 13:00:35 crc kubenswrapper[4921]: I1210 13:00:35.869156 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Dec 10 13:00:35 crc kubenswrapper[4921]: I1210 13:00:35.884412 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Dec 10 13:00:36 crc kubenswrapper[4921]: I1210 13:00:36.031684 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Dec 10 13:00:36 crc kubenswrapper[4921]: I1210 13:00:36.105219 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Dec 10 13:00:36 crc kubenswrapper[4921]: I1210 13:00:36.129036 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Dec 10 13:00:36 crc kubenswrapper[4921]: I1210 13:00:36.156475 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Dec 10 13:00:36 crc kubenswrapper[4921]: I1210 13:00:36.187420 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Dec 10 13:00:36 crc kubenswrapper[4921]: I1210 13:00:36.244576 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Dec 10 13:00:36 crc kubenswrapper[4921]: I1210 13:00:36.333645 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Dec 10 13:00:36 crc kubenswrapper[4921]: I1210 13:00:36.361367 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Dec 10 13:00:36 crc kubenswrapper[4921]: I1210 13:00:36.414242 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Dec 10 13:00:36 crc kubenswrapper[4921]: I1210 13:00:36.463217 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Dec 10 13:00:36 crc kubenswrapper[4921]: I1210 13:00:36.509991 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Dec 10 13:00:36 crc kubenswrapper[4921]: I1210 13:00:36.592091 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Dec 10 13:00:36 crc kubenswrapper[4921]: I1210 13:00:36.608842 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Dec 10 13:00:36 crc kubenswrapper[4921]: I1210 13:00:36.608854 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Dec 10 13:00:36 crc kubenswrapper[4921]: I1210 13:00:36.739099 4921 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/kube-controller-manager namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 
192.168.126.11:10257: connect: connection refused" start-of-body= Dec 10 13:00:36 crc kubenswrapper[4921]: I1210 13:00:36.739439 4921 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" probeResult="failure" output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" Dec 10 13:00:36 crc kubenswrapper[4921]: I1210 13:00:36.739588 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 10 13:00:36 crc kubenswrapper[4921]: I1210 13:00:36.740218 4921 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="kube-controller-manager" containerStatusID={"Type":"cri-o","ID":"ac792ce44743bf4c65763d672a9e77eaaef6d7b2e9377ab7f71f9c051b69c2bc"} pod="openshift-kube-controller-manager/kube-controller-manager-crc" containerMessage="Container kube-controller-manager failed startup probe, will be restarted" Dec 10 13:00:36 crc kubenswrapper[4921]: I1210 13:00:36.740739 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" containerID="cri-o://ac792ce44743bf4c65763d672a9e77eaaef6d7b2e9377ab7f71f9c051b69c2bc" gracePeriod=30 Dec 10 13:00:36 crc kubenswrapper[4921]: I1210 13:00:36.765053 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Dec 10 13:00:36 crc kubenswrapper[4921]: I1210 13:00:36.779645 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Dec 10 13:00:36 crc kubenswrapper[4921]: I1210 13:00:36.829902 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Dec 10 13:00:36 crc kubenswrapper[4921]: I1210 13:00:36.885491 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Dec 10 13:00:36 crc kubenswrapper[4921]: I1210 13:00:36.895447 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Dec 10 13:00:36 crc kubenswrapper[4921]: I1210 13:00:36.975671 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Dec 10 13:00:37 crc kubenswrapper[4921]: I1210 13:00:37.124118 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Dec 10 13:00:37 crc kubenswrapper[4921]: I1210 13:00:37.449672 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Dec 10 13:00:37 crc kubenswrapper[4921]: I1210 13:00:37.455098 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Dec 10 13:00:37 crc kubenswrapper[4921]: I1210 13:00:37.569604 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Dec 10 13:00:37 crc kubenswrapper[4921]: I1210 13:00:37.579838 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Dec 10 13:00:37 crc 
kubenswrapper[4921]: I1210 13:00:37.600037 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Dec 10 13:00:37 crc kubenswrapper[4921]: I1210 13:00:37.795260 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Dec 10 13:00:37 crc kubenswrapper[4921]: I1210 13:00:37.861858 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 10 13:00:37 crc kubenswrapper[4921]: I1210 13:00:37.961120 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Dec 10 13:00:37 crc kubenswrapper[4921]: I1210 13:00:37.991316 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Dec 10 13:00:38 crc kubenswrapper[4921]: I1210 13:00:38.046358 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Dec 10 13:00:38 crc kubenswrapper[4921]: I1210 13:00:38.082742 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Dec 10 13:00:38 crc kubenswrapper[4921]: I1210 13:00:38.091890 4921 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Dec 10 13:00:38 crc kubenswrapper[4921]: I1210 13:00:38.105849 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Dec 10 13:00:38 crc kubenswrapper[4921]: I1210 13:00:38.246876 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Dec 10 13:00:38 crc kubenswrapper[4921]: I1210 13:00:38.354895 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Dec 10 13:00:38 crc kubenswrapper[4921]: I1210 13:00:38.375845 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Dec 10 13:00:38 crc kubenswrapper[4921]: I1210 13:00:38.381973 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Dec 10 13:00:38 crc kubenswrapper[4921]: I1210 13:00:38.393741 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Dec 10 13:00:38 crc kubenswrapper[4921]: I1210 13:00:38.415193 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Dec 10 13:00:38 crc kubenswrapper[4921]: I1210 13:00:38.616167 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Dec 10 13:00:38 crc kubenswrapper[4921]: I1210 13:00:38.716829 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Dec 10 13:00:38 crc kubenswrapper[4921]: I1210 13:00:38.808960 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Dec 10 13:00:38 crc kubenswrapper[4921]: I1210 13:00:38.923480 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Dec 10 13:00:38 crc 
kubenswrapper[4921]: I1210 13:00:38.971371 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Dec 10 13:00:38 crc kubenswrapper[4921]: I1210 13:00:38.987584 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Dec 10 13:00:39 crc kubenswrapper[4921]: I1210 13:00:39.085096 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Dec 10 13:00:39 crc kubenswrapper[4921]: I1210 13:00:39.102509 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Dec 10 13:00:39 crc kubenswrapper[4921]: I1210 13:00:39.174483 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Dec 10 13:00:39 crc kubenswrapper[4921]: I1210 13:00:39.205170 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Dec 10 13:00:39 crc kubenswrapper[4921]: I1210 13:00:39.233260 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Dec 10 13:00:39 crc kubenswrapper[4921]: I1210 13:00:39.257077 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Dec 10 13:00:39 crc kubenswrapper[4921]: I1210 13:00:39.258504 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Dec 10 13:00:39 crc kubenswrapper[4921]: I1210 13:00:39.289048 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Dec 10 13:00:39 crc kubenswrapper[4921]: I1210 13:00:39.365713 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Dec 10 13:00:39 crc kubenswrapper[4921]: I1210 13:00:39.436201 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Dec 10 13:00:39 crc kubenswrapper[4921]: I1210 13:00:39.443849 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Dec 10 13:00:39 crc kubenswrapper[4921]: I1210 13:00:39.504323 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Dec 10 13:00:39 crc kubenswrapper[4921]: I1210 13:00:39.746567 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Dec 10 13:00:39 crc kubenswrapper[4921]: I1210 13:00:39.766874 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Dec 10 13:00:39 crc kubenswrapper[4921]: I1210 13:00:39.905433 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Dec 10 13:00:39 crc kubenswrapper[4921]: I1210 13:00:39.947262 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Dec 10 13:00:40 crc kubenswrapper[4921]: I1210 13:00:40.578104 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Dec 10 13:00:40 crc kubenswrapper[4921]: I1210 
13:00:40.578962 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Dec 10 13:00:40 crc kubenswrapper[4921]: I1210 13:00:40.579228 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Dec 10 13:00:40 crc kubenswrapper[4921]: I1210 13:00:40.580564 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Dec 10 13:00:40 crc kubenswrapper[4921]: I1210 13:00:40.788315 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Dec 10 13:00:40 crc kubenswrapper[4921]: I1210 13:00:40.797101 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Dec 10 13:00:40 crc kubenswrapper[4921]: I1210 13:00:40.974073 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Dec 10 13:00:41 crc kubenswrapper[4921]: I1210 13:00:41.041005 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Dec 10 13:00:41 crc kubenswrapper[4921]: I1210 13:00:41.119476 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Dec 10 13:00:41 crc kubenswrapper[4921]: I1210 13:00:41.381423 4921 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Dec 10 13:00:41 crc kubenswrapper[4921]: I1210 13:00:41.385977 4921 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-g7bns","openshift-kube-apiserver/kube-apiserver-crc","openshift-marketplace/redhat-marketplace-54ksn","openshift-marketplace/certified-operators-wblqq","openshift-marketplace/community-operators-d7btl"] Dec 10 13:00:41 crc kubenswrapper[4921]: I1210 13:00:41.386047 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-76f75987bf-qmgm4","openshift-kube-apiserver/kube-apiserver-crc"] Dec 10 13:00:41 crc kubenswrapper[4921]: E1210 13:00:41.386241 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="00792261-f23c-4fc8-a67b-4b7753b692a1" containerName="oauth-openshift" Dec 10 13:00:41 crc kubenswrapper[4921]: I1210 13:00:41.386260 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="00792261-f23c-4fc8-a67b-4b7753b692a1" containerName="oauth-openshift" Dec 10 13:00:41 crc kubenswrapper[4921]: E1210 13:00:41.386272 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f8e5f473-25ff-49f8-8c92-3201eaefae48" containerName="installer" Dec 10 13:00:41 crc kubenswrapper[4921]: I1210 13:00:41.386279 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="f8e5f473-25ff-49f8-8c92-3201eaefae48" containerName="installer" Dec 10 13:00:41 crc kubenswrapper[4921]: I1210 13:00:41.386602 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="00792261-f23c-4fc8-a67b-4b7753b692a1" containerName="oauth-openshift" Dec 10 13:00:41 crc kubenswrapper[4921]: I1210 13:00:41.386638 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="f8e5f473-25ff-49f8-8c92-3201eaefae48" containerName="installer" Dec 10 13:00:41 crc kubenswrapper[4921]: I1210 13:00:41.387105 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-76f75987bf-qmgm4" Dec 10 13:00:41 crc kubenswrapper[4921]: I1210 13:00:41.387902 4921 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f57208b0-80bc-4c1b-bbab-9d2f858972f6" Dec 10 13:00:41 crc kubenswrapper[4921]: I1210 13:00:41.387929 4921 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f57208b0-80bc-4c1b-bbab-9d2f858972f6" Dec 10 13:00:41 crc kubenswrapper[4921]: I1210 13:00:41.391077 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Dec 10 13:00:41 crc kubenswrapper[4921]: I1210 13:00:41.391099 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Dec 10 13:00:41 crc kubenswrapper[4921]: I1210 13:00:41.391127 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Dec 10 13:00:41 crc kubenswrapper[4921]: I1210 13:00:41.391318 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Dec 10 13:00:41 crc kubenswrapper[4921]: I1210 13:00:41.391376 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Dec 10 13:00:41 crc kubenswrapper[4921]: I1210 13:00:41.391472 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Dec 10 13:00:41 crc kubenswrapper[4921]: I1210 13:00:41.391609 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Dec 10 13:00:41 crc kubenswrapper[4921]: I1210 13:00:41.391819 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Dec 10 13:00:41 crc kubenswrapper[4921]: I1210 13:00:41.391917 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Dec 10 13:00:41 crc kubenswrapper[4921]: I1210 13:00:41.391995 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Dec 10 13:00:41 crc kubenswrapper[4921]: I1210 13:00:41.393556 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Dec 10 13:00:41 crc kubenswrapper[4921]: I1210 13:00:41.393735 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 10 13:00:41 crc kubenswrapper[4921]: I1210 13:00:41.394943 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Dec 10 13:00:41 crc kubenswrapper[4921]: I1210 13:00:41.398867 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Dec 10 13:00:41 crc kubenswrapper[4921]: I1210 13:00:41.410483 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Dec 10 13:00:41 crc kubenswrapper[4921]: I1210 13:00:41.412156 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Dec 10 
13:00:41 crc kubenswrapper[4921]: I1210 13:00:41.417983 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=25.417967542 podStartE2EDuration="25.417967542s" podCreationTimestamp="2025-12-10 13:00:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 13:00:41.413779848 +0000 UTC m=+238.630001782" watchObservedRunningTime="2025-12-10 13:00:41.417967542 +0000 UTC m=+238.634189456" Dec 10 13:00:41 crc kubenswrapper[4921]: I1210 13:00:41.509175 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Dec 10 13:00:41 crc kubenswrapper[4921]: I1210 13:00:41.536915 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Dec 10 13:00:41 crc kubenswrapper[4921]: I1210 13:00:41.581244 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/74834b19-3ac4-40c3-a231-ef6d6b3c2716-v4-0-config-system-service-ca\") pod \"oauth-openshift-76f75987bf-qmgm4\" (UID: \"74834b19-3ac4-40c3-a231-ef6d6b3c2716\") " pod="openshift-authentication/oauth-openshift-76f75987bf-qmgm4" Dec 10 13:00:41 crc kubenswrapper[4921]: I1210 13:00:41.581308 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v6vbj\" (UniqueName: \"kubernetes.io/projected/74834b19-3ac4-40c3-a231-ef6d6b3c2716-kube-api-access-v6vbj\") pod \"oauth-openshift-76f75987bf-qmgm4\" (UID: \"74834b19-3ac4-40c3-a231-ef6d6b3c2716\") " pod="openshift-authentication/oauth-openshift-76f75987bf-qmgm4" Dec 10 13:00:41 crc kubenswrapper[4921]: I1210 13:00:41.581329 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/74834b19-3ac4-40c3-a231-ef6d6b3c2716-v4-0-config-system-cliconfig\") pod \"oauth-openshift-76f75987bf-qmgm4\" (UID: \"74834b19-3ac4-40c3-a231-ef6d6b3c2716\") " pod="openshift-authentication/oauth-openshift-76f75987bf-qmgm4" Dec 10 13:00:41 crc kubenswrapper[4921]: I1210 13:00:41.581346 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/74834b19-3ac4-40c3-a231-ef6d6b3c2716-audit-policies\") pod \"oauth-openshift-76f75987bf-qmgm4\" (UID: \"74834b19-3ac4-40c3-a231-ef6d6b3c2716\") " pod="openshift-authentication/oauth-openshift-76f75987bf-qmgm4" Dec 10 13:00:41 crc kubenswrapper[4921]: I1210 13:00:41.581370 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/74834b19-3ac4-40c3-a231-ef6d6b3c2716-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-76f75987bf-qmgm4\" (UID: \"74834b19-3ac4-40c3-a231-ef6d6b3c2716\") " pod="openshift-authentication/oauth-openshift-76f75987bf-qmgm4" Dec 10 13:00:41 crc kubenswrapper[4921]: I1210 13:00:41.581407 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/74834b19-3ac4-40c3-a231-ef6d6b3c2716-v4-0-config-system-session\") pod 
\"oauth-openshift-76f75987bf-qmgm4\" (UID: \"74834b19-3ac4-40c3-a231-ef6d6b3c2716\") " pod="openshift-authentication/oauth-openshift-76f75987bf-qmgm4" Dec 10 13:00:41 crc kubenswrapper[4921]: I1210 13:00:41.581431 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/74834b19-3ac4-40c3-a231-ef6d6b3c2716-v4-0-config-user-template-error\") pod \"oauth-openshift-76f75987bf-qmgm4\" (UID: \"74834b19-3ac4-40c3-a231-ef6d6b3c2716\") " pod="openshift-authentication/oauth-openshift-76f75987bf-qmgm4" Dec 10 13:00:41 crc kubenswrapper[4921]: I1210 13:00:41.581470 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/74834b19-3ac4-40c3-a231-ef6d6b3c2716-v4-0-config-system-serving-cert\") pod \"oauth-openshift-76f75987bf-qmgm4\" (UID: \"74834b19-3ac4-40c3-a231-ef6d6b3c2716\") " pod="openshift-authentication/oauth-openshift-76f75987bf-qmgm4" Dec 10 13:00:41 crc kubenswrapper[4921]: I1210 13:00:41.581503 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/74834b19-3ac4-40c3-a231-ef6d6b3c2716-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-76f75987bf-qmgm4\" (UID: \"74834b19-3ac4-40c3-a231-ef6d6b3c2716\") " pod="openshift-authentication/oauth-openshift-76f75987bf-qmgm4" Dec 10 13:00:41 crc kubenswrapper[4921]: I1210 13:00:41.581524 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/74834b19-3ac4-40c3-a231-ef6d6b3c2716-v4-0-config-user-template-login\") pod \"oauth-openshift-76f75987bf-qmgm4\" (UID: \"74834b19-3ac4-40c3-a231-ef6d6b3c2716\") " pod="openshift-authentication/oauth-openshift-76f75987bf-qmgm4" Dec 10 13:00:41 crc kubenswrapper[4921]: I1210 13:00:41.581544 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/74834b19-3ac4-40c3-a231-ef6d6b3c2716-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-76f75987bf-qmgm4\" (UID: \"74834b19-3ac4-40c3-a231-ef6d6b3c2716\") " pod="openshift-authentication/oauth-openshift-76f75987bf-qmgm4" Dec 10 13:00:41 crc kubenswrapper[4921]: I1210 13:00:41.581565 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/74834b19-3ac4-40c3-a231-ef6d6b3c2716-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-76f75987bf-qmgm4\" (UID: \"74834b19-3ac4-40c3-a231-ef6d6b3c2716\") " pod="openshift-authentication/oauth-openshift-76f75987bf-qmgm4" Dec 10 13:00:41 crc kubenswrapper[4921]: I1210 13:00:41.581655 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/74834b19-3ac4-40c3-a231-ef6d6b3c2716-audit-dir\") pod \"oauth-openshift-76f75987bf-qmgm4\" (UID: \"74834b19-3ac4-40c3-a231-ef6d6b3c2716\") " pod="openshift-authentication/oauth-openshift-76f75987bf-qmgm4" Dec 10 13:00:41 crc kubenswrapper[4921]: I1210 13:00:41.581716 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/74834b19-3ac4-40c3-a231-ef6d6b3c2716-v4-0-config-system-router-certs\") pod \"oauth-openshift-76f75987bf-qmgm4\" (UID: \"74834b19-3ac4-40c3-a231-ef6d6b3c2716\") " pod="openshift-authentication/oauth-openshift-76f75987bf-qmgm4" Dec 10 13:00:41 crc kubenswrapper[4921]: I1210 13:00:41.615727 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 10 13:00:41 crc kubenswrapper[4921]: I1210 13:00:41.637010 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Dec 10 13:00:41 crc kubenswrapper[4921]: I1210 13:00:41.682840 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v6vbj\" (UniqueName: \"kubernetes.io/projected/74834b19-3ac4-40c3-a231-ef6d6b3c2716-kube-api-access-v6vbj\") pod \"oauth-openshift-76f75987bf-qmgm4\" (UID: \"74834b19-3ac4-40c3-a231-ef6d6b3c2716\") " pod="openshift-authentication/oauth-openshift-76f75987bf-qmgm4" Dec 10 13:00:41 crc kubenswrapper[4921]: I1210 13:00:41.682887 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/74834b19-3ac4-40c3-a231-ef6d6b3c2716-v4-0-config-system-cliconfig\") pod \"oauth-openshift-76f75987bf-qmgm4\" (UID: \"74834b19-3ac4-40c3-a231-ef6d6b3c2716\") " pod="openshift-authentication/oauth-openshift-76f75987bf-qmgm4" Dec 10 13:00:41 crc kubenswrapper[4921]: I1210 13:00:41.682905 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/74834b19-3ac4-40c3-a231-ef6d6b3c2716-audit-policies\") pod \"oauth-openshift-76f75987bf-qmgm4\" (UID: \"74834b19-3ac4-40c3-a231-ef6d6b3c2716\") " pod="openshift-authentication/oauth-openshift-76f75987bf-qmgm4" Dec 10 13:00:41 crc kubenswrapper[4921]: I1210 13:00:41.682933 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/74834b19-3ac4-40c3-a231-ef6d6b3c2716-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-76f75987bf-qmgm4\" (UID: \"74834b19-3ac4-40c3-a231-ef6d6b3c2716\") " pod="openshift-authentication/oauth-openshift-76f75987bf-qmgm4" Dec 10 13:00:41 crc kubenswrapper[4921]: I1210 13:00:41.682954 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/74834b19-3ac4-40c3-a231-ef6d6b3c2716-v4-0-config-system-session\") pod \"oauth-openshift-76f75987bf-qmgm4\" (UID: \"74834b19-3ac4-40c3-a231-ef6d6b3c2716\") " pod="openshift-authentication/oauth-openshift-76f75987bf-qmgm4" Dec 10 13:00:41 crc kubenswrapper[4921]: I1210 13:00:41.682975 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/74834b19-3ac4-40c3-a231-ef6d6b3c2716-v4-0-config-user-template-error\") pod \"oauth-openshift-76f75987bf-qmgm4\" (UID: \"74834b19-3ac4-40c3-a231-ef6d6b3c2716\") " pod="openshift-authentication/oauth-openshift-76f75987bf-qmgm4" Dec 10 13:00:41 crc kubenswrapper[4921]: I1210 13:00:41.682991 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: 
\"kubernetes.io/secret/74834b19-3ac4-40c3-a231-ef6d6b3c2716-v4-0-config-system-serving-cert\") pod \"oauth-openshift-76f75987bf-qmgm4\" (UID: \"74834b19-3ac4-40c3-a231-ef6d6b3c2716\") " pod="openshift-authentication/oauth-openshift-76f75987bf-qmgm4" Dec 10 13:00:41 crc kubenswrapper[4921]: I1210 13:00:41.683010 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/74834b19-3ac4-40c3-a231-ef6d6b3c2716-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-76f75987bf-qmgm4\" (UID: \"74834b19-3ac4-40c3-a231-ef6d6b3c2716\") " pod="openshift-authentication/oauth-openshift-76f75987bf-qmgm4" Dec 10 13:00:41 crc kubenswrapper[4921]: I1210 13:00:41.683032 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/74834b19-3ac4-40c3-a231-ef6d6b3c2716-v4-0-config-user-template-login\") pod \"oauth-openshift-76f75987bf-qmgm4\" (UID: \"74834b19-3ac4-40c3-a231-ef6d6b3c2716\") " pod="openshift-authentication/oauth-openshift-76f75987bf-qmgm4" Dec 10 13:00:41 crc kubenswrapper[4921]: I1210 13:00:41.683831 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/74834b19-3ac4-40c3-a231-ef6d6b3c2716-v4-0-config-system-cliconfig\") pod \"oauth-openshift-76f75987bf-qmgm4\" (UID: \"74834b19-3ac4-40c3-a231-ef6d6b3c2716\") " pod="openshift-authentication/oauth-openshift-76f75987bf-qmgm4" Dec 10 13:00:41 crc kubenswrapper[4921]: I1210 13:00:41.683832 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/74834b19-3ac4-40c3-a231-ef6d6b3c2716-audit-policies\") pod \"oauth-openshift-76f75987bf-qmgm4\" (UID: \"74834b19-3ac4-40c3-a231-ef6d6b3c2716\") " pod="openshift-authentication/oauth-openshift-76f75987bf-qmgm4" Dec 10 13:00:41 crc kubenswrapper[4921]: I1210 13:00:41.684813 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/74834b19-3ac4-40c3-a231-ef6d6b3c2716-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-76f75987bf-qmgm4\" (UID: \"74834b19-3ac4-40c3-a231-ef6d6b3c2716\") " pod="openshift-authentication/oauth-openshift-76f75987bf-qmgm4" Dec 10 13:00:41 crc kubenswrapper[4921]: I1210 13:00:41.684861 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/74834b19-3ac4-40c3-a231-ef6d6b3c2716-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-76f75987bf-qmgm4\" (UID: \"74834b19-3ac4-40c3-a231-ef6d6b3c2716\") " pod="openshift-authentication/oauth-openshift-76f75987bf-qmgm4" Dec 10 13:00:41 crc kubenswrapper[4921]: I1210 13:00:41.684895 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/74834b19-3ac4-40c3-a231-ef6d6b3c2716-audit-dir\") pod \"oauth-openshift-76f75987bf-qmgm4\" (UID: \"74834b19-3ac4-40c3-a231-ef6d6b3c2716\") " pod="openshift-authentication/oauth-openshift-76f75987bf-qmgm4" Dec 10 13:00:41 crc kubenswrapper[4921]: I1210 13:00:41.684974 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: 
\"kubernetes.io/secret/74834b19-3ac4-40c3-a231-ef6d6b3c2716-v4-0-config-system-router-certs\") pod \"oauth-openshift-76f75987bf-qmgm4\" (UID: \"74834b19-3ac4-40c3-a231-ef6d6b3c2716\") " pod="openshift-authentication/oauth-openshift-76f75987bf-qmgm4" Dec 10 13:00:41 crc kubenswrapper[4921]: I1210 13:00:41.685000 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/74834b19-3ac4-40c3-a231-ef6d6b3c2716-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-76f75987bf-qmgm4\" (UID: \"74834b19-3ac4-40c3-a231-ef6d6b3c2716\") " pod="openshift-authentication/oauth-openshift-76f75987bf-qmgm4" Dec 10 13:00:41 crc kubenswrapper[4921]: I1210 13:00:41.685005 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/74834b19-3ac4-40c3-a231-ef6d6b3c2716-v4-0-config-system-service-ca\") pod \"oauth-openshift-76f75987bf-qmgm4\" (UID: \"74834b19-3ac4-40c3-a231-ef6d6b3c2716\") " pod="openshift-authentication/oauth-openshift-76f75987bf-qmgm4" Dec 10 13:00:41 crc kubenswrapper[4921]: I1210 13:00:41.685597 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/74834b19-3ac4-40c3-a231-ef6d6b3c2716-v4-0-config-system-service-ca\") pod \"oauth-openshift-76f75987bf-qmgm4\" (UID: \"74834b19-3ac4-40c3-a231-ef6d6b3c2716\") " pod="openshift-authentication/oauth-openshift-76f75987bf-qmgm4" Dec 10 13:00:41 crc kubenswrapper[4921]: I1210 13:00:41.685641 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/74834b19-3ac4-40c3-a231-ef6d6b3c2716-audit-dir\") pod \"oauth-openshift-76f75987bf-qmgm4\" (UID: \"74834b19-3ac4-40c3-a231-ef6d6b3c2716\") " pod="openshift-authentication/oauth-openshift-76f75987bf-qmgm4" Dec 10 13:00:41 crc kubenswrapper[4921]: I1210 13:00:41.688672 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/74834b19-3ac4-40c3-a231-ef6d6b3c2716-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-76f75987bf-qmgm4\" (UID: \"74834b19-3ac4-40c3-a231-ef6d6b3c2716\") " pod="openshift-authentication/oauth-openshift-76f75987bf-qmgm4" Dec 10 13:00:41 crc kubenswrapper[4921]: I1210 13:00:41.688680 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/74834b19-3ac4-40c3-a231-ef6d6b3c2716-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-76f75987bf-qmgm4\" (UID: \"74834b19-3ac4-40c3-a231-ef6d6b3c2716\") " pod="openshift-authentication/oauth-openshift-76f75987bf-qmgm4" Dec 10 13:00:41 crc kubenswrapper[4921]: I1210 13:00:41.688764 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/74834b19-3ac4-40c3-a231-ef6d6b3c2716-v4-0-config-system-session\") pod \"oauth-openshift-76f75987bf-qmgm4\" (UID: \"74834b19-3ac4-40c3-a231-ef6d6b3c2716\") " pod="openshift-authentication/oauth-openshift-76f75987bf-qmgm4" Dec 10 13:00:41 crc kubenswrapper[4921]: I1210 13:00:41.690198 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: 
\"kubernetes.io/secret/74834b19-3ac4-40c3-a231-ef6d6b3c2716-v4-0-config-user-template-error\") pod \"oauth-openshift-76f75987bf-qmgm4\" (UID: \"74834b19-3ac4-40c3-a231-ef6d6b3c2716\") " pod="openshift-authentication/oauth-openshift-76f75987bf-qmgm4" Dec 10 13:00:41 crc kubenswrapper[4921]: I1210 13:00:41.691785 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/74834b19-3ac4-40c3-a231-ef6d6b3c2716-v4-0-config-system-serving-cert\") pod \"oauth-openshift-76f75987bf-qmgm4\" (UID: \"74834b19-3ac4-40c3-a231-ef6d6b3c2716\") " pod="openshift-authentication/oauth-openshift-76f75987bf-qmgm4" Dec 10 13:00:41 crc kubenswrapper[4921]: I1210 13:00:41.692039 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/74834b19-3ac4-40c3-a231-ef6d6b3c2716-v4-0-config-user-template-login\") pod \"oauth-openshift-76f75987bf-qmgm4\" (UID: \"74834b19-3ac4-40c3-a231-ef6d6b3c2716\") " pod="openshift-authentication/oauth-openshift-76f75987bf-qmgm4" Dec 10 13:00:41 crc kubenswrapper[4921]: I1210 13:00:41.697779 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/74834b19-3ac4-40c3-a231-ef6d6b3c2716-v4-0-config-system-router-certs\") pod \"oauth-openshift-76f75987bf-qmgm4\" (UID: \"74834b19-3ac4-40c3-a231-ef6d6b3c2716\") " pod="openshift-authentication/oauth-openshift-76f75987bf-qmgm4" Dec 10 13:00:41 crc kubenswrapper[4921]: I1210 13:00:41.698874 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/74834b19-3ac4-40c3-a231-ef6d6b3c2716-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-76f75987bf-qmgm4\" (UID: \"74834b19-3ac4-40c3-a231-ef6d6b3c2716\") " pod="openshift-authentication/oauth-openshift-76f75987bf-qmgm4" Dec 10 13:00:41 crc kubenswrapper[4921]: I1210 13:00:41.700216 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v6vbj\" (UniqueName: \"kubernetes.io/projected/74834b19-3ac4-40c3-a231-ef6d6b3c2716-kube-api-access-v6vbj\") pod \"oauth-openshift-76f75987bf-qmgm4\" (UID: \"74834b19-3ac4-40c3-a231-ef6d6b3c2716\") " pod="openshift-authentication/oauth-openshift-76f75987bf-qmgm4" Dec 10 13:00:41 crc kubenswrapper[4921]: I1210 13:00:41.718582 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-76f75987bf-qmgm4" Dec 10 13:00:41 crc kubenswrapper[4921]: I1210 13:00:41.787326 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Dec 10 13:00:41 crc kubenswrapper[4921]: I1210 13:00:41.921264 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Dec 10 13:00:42 crc kubenswrapper[4921]: I1210 13:00:42.092415 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-76f75987bf-qmgm4"] Dec 10 13:00:42 crc kubenswrapper[4921]: I1210 13:00:42.604634 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-76f75987bf-qmgm4" event={"ID":"74834b19-3ac4-40c3-a231-ef6d6b3c2716","Type":"ContainerStarted","Data":"10fffb3600eb6534e3c91fb6b923ce538b83197669d1f2b260d312ecb7fa12f5"} Dec 10 13:00:42 crc kubenswrapper[4921]: I1210 13:00:42.604969 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-76f75987bf-qmgm4" Dec 10 13:00:42 crc kubenswrapper[4921]: I1210 13:00:42.604981 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-76f75987bf-qmgm4" event={"ID":"74834b19-3ac4-40c3-a231-ef6d6b3c2716","Type":"ContainerStarted","Data":"44285ff707667aec0478b1442b2e473fb7bc6a51a8cc49449ac24cb5daeecc23"} Dec 10 13:00:42 crc kubenswrapper[4921]: I1210 13:00:42.624442 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-76f75987bf-qmgm4" podStartSLOduration=58.624423557 podStartE2EDuration="58.624423557s" podCreationTimestamp="2025-12-10 12:59:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 13:00:42.622429943 +0000 UTC m=+239.838651867" watchObservedRunningTime="2025-12-10 13:00:42.624423557 +0000 UTC m=+239.840645501" Dec 10 13:00:42 crc kubenswrapper[4921]: I1210 13:00:42.853991 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-76f75987bf-qmgm4" Dec 10 13:00:43 crc kubenswrapper[4921]: I1210 13:00:43.199966 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="00792261-f23c-4fc8-a67b-4b7753b692a1" path="/var/lib/kubelet/pods/00792261-f23c-4fc8-a67b-4b7753b692a1/volumes" Dec 10 13:00:43 crc kubenswrapper[4921]: I1210 13:00:43.200782 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="13ae054d-506c-4469-baaa-c8cf6347b2f1" path="/var/lib/kubelet/pods/13ae054d-506c-4469-baaa-c8cf6347b2f1/volumes" Dec 10 13:00:43 crc kubenswrapper[4921]: I1210 13:00:43.201417 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2629ed61-02c7-450a-9d32-d3277755229f" path="/var/lib/kubelet/pods/2629ed61-02c7-450a-9d32-d3277755229f/volumes" Dec 10 13:00:43 crc kubenswrapper[4921]: I1210 13:00:43.202490 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dc9190a3-c02a-48f7-ab9f-8be8951f3f37" path="/var/lib/kubelet/pods/dc9190a3-c02a-48f7-ab9f-8be8951f3f37/volumes" Dec 10 13:00:49 crc kubenswrapper[4921]: I1210 13:00:49.819867 4921 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 10 13:00:49 crc kubenswrapper[4921]: I1210 13:00:49.820706 4921 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" containerID="cri-o://6312a19d1213782f70503239c07f7233e99ce53bf35a25d8f677e851571520de" gracePeriod=5 Dec 10 13:00:50 crc kubenswrapper[4921]: I1210 13:00:50.207256 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Dec 10 13:00:55 crc kubenswrapper[4921]: I1210 13:00:55.384427 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Dec 10 13:00:55 crc kubenswrapper[4921]: I1210 13:00:55.384757 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 10 13:00:55 crc kubenswrapper[4921]: I1210 13:00:55.564653 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 10 13:00:55 crc kubenswrapper[4921]: I1210 13:00:55.564708 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 10 13:00:55 crc kubenswrapper[4921]: I1210 13:00:55.564734 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 10 13:00:55 crc kubenswrapper[4921]: I1210 13:00:55.564798 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 10 13:00:55 crc kubenswrapper[4921]: I1210 13:00:55.564827 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 10 13:00:55 crc kubenswrapper[4921]: I1210 13:00:55.564855 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 13:00:55 crc kubenswrapper[4921]: I1210 13:00:55.564871 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log" (OuterVolumeSpecName: "var-log") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-log". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 13:00:55 crc kubenswrapper[4921]: I1210 13:00:55.564926 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock" (OuterVolumeSpecName: "var-lock") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 13:00:55 crc kubenswrapper[4921]: I1210 13:00:55.564846 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests" (OuterVolumeSpecName: "manifests") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "manifests". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 13:00:55 crc kubenswrapper[4921]: I1210 13:00:55.565182 4921 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") on node \"crc\" DevicePath \"\"" Dec 10 13:00:55 crc kubenswrapper[4921]: I1210 13:00:55.565208 4921 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") on node \"crc\" DevicePath \"\"" Dec 10 13:00:55 crc kubenswrapper[4921]: I1210 13:00:55.565221 4921 reconciler_common.go:293] "Volume detached for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") on node \"crc\" DevicePath \"\"" Dec 10 13:00:55 crc kubenswrapper[4921]: I1210 13:00:55.565233 4921 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 10 13:00:55 crc kubenswrapper[4921]: I1210 13:00:55.576966 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir" (OuterVolumeSpecName: "pod-resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "pod-resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 13:00:55 crc kubenswrapper[4921]: I1210 13:00:55.665815 4921 reconciler_common.go:293] "Volume detached for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 10 13:00:55 crc kubenswrapper[4921]: I1210 13:00:55.666343 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Dec 10 13:00:55 crc kubenswrapper[4921]: I1210 13:00:55.666537 4921 generic.go:334] "Generic (PLEG): container finished" podID="f85e55b1a89d02b0cb034b1ea31ed45a" containerID="6312a19d1213782f70503239c07f7233e99ce53bf35a25d8f677e851571520de" exitCode=137 Dec 10 13:00:55 crc kubenswrapper[4921]: I1210 13:00:55.666603 4921 scope.go:117] "RemoveContainer" containerID="6312a19d1213782f70503239c07f7233e99ce53bf35a25d8f677e851571520de" Dec 10 13:00:55 crc kubenswrapper[4921]: I1210 13:00:55.666733 4921 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 10 13:00:55 crc kubenswrapper[4921]: I1210 13:00:55.682260 4921 scope.go:117] "RemoveContainer" containerID="6312a19d1213782f70503239c07f7233e99ce53bf35a25d8f677e851571520de" Dec 10 13:00:55 crc kubenswrapper[4921]: E1210 13:00:55.683827 4921 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6312a19d1213782f70503239c07f7233e99ce53bf35a25d8f677e851571520de\": container with ID starting with 6312a19d1213782f70503239c07f7233e99ce53bf35a25d8f677e851571520de not found: ID does not exist" containerID="6312a19d1213782f70503239c07f7233e99ce53bf35a25d8f677e851571520de" Dec 10 13:00:55 crc kubenswrapper[4921]: I1210 13:00:55.683876 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6312a19d1213782f70503239c07f7233e99ce53bf35a25d8f677e851571520de"} err="failed to get container status \"6312a19d1213782f70503239c07f7233e99ce53bf35a25d8f677e851571520de\": rpc error: code = NotFound desc = could not find container \"6312a19d1213782f70503239c07f7233e99ce53bf35a25d8f677e851571520de\": container with ID starting with 6312a19d1213782f70503239c07f7233e99ce53bf35a25d8f677e851571520de not found: ID does not exist" Dec 10 13:00:57 crc kubenswrapper[4921]: I1210 13:00:57.199704 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" path="/var/lib/kubelet/pods/f85e55b1a89d02b0cb034b1ea31ed45a/volumes" Dec 10 13:01:00 crc kubenswrapper[4921]: I1210 13:01:00.382101 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Dec 10 13:01:01 crc kubenswrapper[4921]: I1210 13:01:01.008315 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Dec 10 13:01:02 crc kubenswrapper[4921]: I1210 13:01:02.434617 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Dec 10 13:01:03 crc kubenswrapper[4921]: I1210 13:01:03.559970 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-s2x6r"] Dec 10 13:01:03 crc kubenswrapper[4921]: E1210 13:01:03.560481 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 10 13:01:03 crc kubenswrapper[4921]: I1210 13:01:03.560496 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 10 13:01:03 crc kubenswrapper[4921]: I1210 13:01:03.560596 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 10 13:01:03 crc kubenswrapper[4921]: I1210 13:01:03.561212 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-s2x6r" Dec 10 13:01:03 crc kubenswrapper[4921]: I1210 13:01:03.564052 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 10 13:01:03 crc kubenswrapper[4921]: I1210 13:01:03.575084 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-s2x6r"] Dec 10 13:01:03 crc kubenswrapper[4921]: I1210 13:01:03.665243 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/02c8bc85-0cb6-49fb-8c84-625acc7de573-utilities\") pod \"certified-operators-s2x6r\" (UID: \"02c8bc85-0cb6-49fb-8c84-625acc7de573\") " pod="openshift-marketplace/certified-operators-s2x6r" Dec 10 13:01:03 crc kubenswrapper[4921]: I1210 13:01:03.665724 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/02c8bc85-0cb6-49fb-8c84-625acc7de573-catalog-content\") pod \"certified-operators-s2x6r\" (UID: \"02c8bc85-0cb6-49fb-8c84-625acc7de573\") " pod="openshift-marketplace/certified-operators-s2x6r" Dec 10 13:01:03 crc kubenswrapper[4921]: I1210 13:01:03.665952 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5j7l2\" (UniqueName: \"kubernetes.io/projected/02c8bc85-0cb6-49fb-8c84-625acc7de573-kube-api-access-5j7l2\") pod \"certified-operators-s2x6r\" (UID: \"02c8bc85-0cb6-49fb-8c84-625acc7de573\") " pod="openshift-marketplace/certified-operators-s2x6r" Dec 10 13:01:03 crc kubenswrapper[4921]: I1210 13:01:03.766822 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/02c8bc85-0cb6-49fb-8c84-625acc7de573-catalog-content\") pod \"certified-operators-s2x6r\" (UID: \"02c8bc85-0cb6-49fb-8c84-625acc7de573\") " pod="openshift-marketplace/certified-operators-s2x6r" Dec 10 13:01:03 crc kubenswrapper[4921]: I1210 13:01:03.767164 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5j7l2\" (UniqueName: \"kubernetes.io/projected/02c8bc85-0cb6-49fb-8c84-625acc7de573-kube-api-access-5j7l2\") pod \"certified-operators-s2x6r\" (UID: \"02c8bc85-0cb6-49fb-8c84-625acc7de573\") " pod="openshift-marketplace/certified-operators-s2x6r" Dec 10 13:01:03 crc kubenswrapper[4921]: I1210 13:01:03.767314 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/02c8bc85-0cb6-49fb-8c84-625acc7de573-utilities\") pod \"certified-operators-s2x6r\" (UID: \"02c8bc85-0cb6-49fb-8c84-625acc7de573\") " pod="openshift-marketplace/certified-operators-s2x6r" Dec 10 13:01:03 crc kubenswrapper[4921]: I1210 13:01:03.767426 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/02c8bc85-0cb6-49fb-8c84-625acc7de573-catalog-content\") pod \"certified-operators-s2x6r\" (UID: \"02c8bc85-0cb6-49fb-8c84-625acc7de573\") " pod="openshift-marketplace/certified-operators-s2x6r" Dec 10 13:01:03 crc kubenswrapper[4921]: I1210 13:01:03.767630 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/02c8bc85-0cb6-49fb-8c84-625acc7de573-utilities\") pod \"certified-operators-s2x6r\" (UID: 
\"02c8bc85-0cb6-49fb-8c84-625acc7de573\") " pod="openshift-marketplace/certified-operators-s2x6r" Dec 10 13:01:03 crc kubenswrapper[4921]: I1210 13:01:03.793913 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5j7l2\" (UniqueName: \"kubernetes.io/projected/02c8bc85-0cb6-49fb-8c84-625acc7de573-kube-api-access-5j7l2\") pod \"certified-operators-s2x6r\" (UID: \"02c8bc85-0cb6-49fb-8c84-625acc7de573\") " pod="openshift-marketplace/certified-operators-s2x6r" Dec 10 13:01:03 crc kubenswrapper[4921]: I1210 13:01:03.885096 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-s2x6r" Dec 10 13:01:04 crc kubenswrapper[4921]: I1210 13:01:04.101284 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-s2x6r"] Dec 10 13:01:04 crc kubenswrapper[4921]: I1210 13:01:04.239596 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29422860-gd57j"] Dec 10 13:01:04 crc kubenswrapper[4921]: I1210 13:01:04.240204 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29422860-gd57j" Dec 10 13:01:04 crc kubenswrapper[4921]: I1210 13:01:04.242758 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 10 13:01:04 crc kubenswrapper[4921]: I1210 13:01:04.242856 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 10 13:01:04 crc kubenswrapper[4921]: I1210 13:01:04.258177 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29422860-gd57j"] Dec 10 13:01:04 crc kubenswrapper[4921]: I1210 13:01:04.373218 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9ecd670a-6fc9-4f7c-9394-0a2fe5590147-secret-volume\") pod \"collect-profiles-29422860-gd57j\" (UID: \"9ecd670a-6fc9-4f7c-9394-0a2fe5590147\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29422860-gd57j" Dec 10 13:01:04 crc kubenswrapper[4921]: I1210 13:01:04.374098 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-grdhp\" (UniqueName: \"kubernetes.io/projected/9ecd670a-6fc9-4f7c-9394-0a2fe5590147-kube-api-access-grdhp\") pod \"collect-profiles-29422860-gd57j\" (UID: \"9ecd670a-6fc9-4f7c-9394-0a2fe5590147\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29422860-gd57j" Dec 10 13:01:04 crc kubenswrapper[4921]: I1210 13:01:04.374375 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9ecd670a-6fc9-4f7c-9394-0a2fe5590147-config-volume\") pod \"collect-profiles-29422860-gd57j\" (UID: \"9ecd670a-6fc9-4f7c-9394-0a2fe5590147\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29422860-gd57j" Dec 10 13:01:04 crc kubenswrapper[4921]: I1210 13:01:04.476530 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9ecd670a-6fc9-4f7c-9394-0a2fe5590147-config-volume\") pod \"collect-profiles-29422860-gd57j\" (UID: \"9ecd670a-6fc9-4f7c-9394-0a2fe5590147\") " 
pod="openshift-operator-lifecycle-manager/collect-profiles-29422860-gd57j" Dec 10 13:01:04 crc kubenswrapper[4921]: I1210 13:01:04.476610 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9ecd670a-6fc9-4f7c-9394-0a2fe5590147-secret-volume\") pod \"collect-profiles-29422860-gd57j\" (UID: \"9ecd670a-6fc9-4f7c-9394-0a2fe5590147\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29422860-gd57j" Dec 10 13:01:04 crc kubenswrapper[4921]: I1210 13:01:04.476640 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-grdhp\" (UniqueName: \"kubernetes.io/projected/9ecd670a-6fc9-4f7c-9394-0a2fe5590147-kube-api-access-grdhp\") pod \"collect-profiles-29422860-gd57j\" (UID: \"9ecd670a-6fc9-4f7c-9394-0a2fe5590147\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29422860-gd57j" Dec 10 13:01:04 crc kubenswrapper[4921]: I1210 13:01:04.477495 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9ecd670a-6fc9-4f7c-9394-0a2fe5590147-config-volume\") pod \"collect-profiles-29422860-gd57j\" (UID: \"9ecd670a-6fc9-4f7c-9394-0a2fe5590147\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29422860-gd57j" Dec 10 13:01:04 crc kubenswrapper[4921]: I1210 13:01:04.485107 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9ecd670a-6fc9-4f7c-9394-0a2fe5590147-secret-volume\") pod \"collect-profiles-29422860-gd57j\" (UID: \"9ecd670a-6fc9-4f7c-9394-0a2fe5590147\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29422860-gd57j" Dec 10 13:01:04 crc kubenswrapper[4921]: I1210 13:01:04.502474 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-grdhp\" (UniqueName: \"kubernetes.io/projected/9ecd670a-6fc9-4f7c-9394-0a2fe5590147-kube-api-access-grdhp\") pod \"collect-profiles-29422860-gd57j\" (UID: \"9ecd670a-6fc9-4f7c-9394-0a2fe5590147\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29422860-gd57j" Dec 10 13:01:04 crc kubenswrapper[4921]: I1210 13:01:04.553600 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29422860-gd57j" Dec 10 13:01:04 crc kubenswrapper[4921]: I1210 13:01:04.727615 4921 generic.go:334] "Generic (PLEG): container finished" podID="02c8bc85-0cb6-49fb-8c84-625acc7de573" containerID="98a315dce7a124e95c4d1c598c4d27e0e6965394a3ddd659b8dd4457b439dd79" exitCode=0 Dec 10 13:01:04 crc kubenswrapper[4921]: I1210 13:01:04.727704 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s2x6r" event={"ID":"02c8bc85-0cb6-49fb-8c84-625acc7de573","Type":"ContainerDied","Data":"98a315dce7a124e95c4d1c598c4d27e0e6965394a3ddd659b8dd4457b439dd79"} Dec 10 13:01:04 crc kubenswrapper[4921]: I1210 13:01:04.727756 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s2x6r" event={"ID":"02c8bc85-0cb6-49fb-8c84-625acc7de573","Type":"ContainerStarted","Data":"5049499e516474bcdfa75a35561e175178f98ce528f1ea3dad8f031998df3aa3"} Dec 10 13:01:04 crc kubenswrapper[4921]: I1210 13:01:04.945504 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29422860-gd57j"] Dec 10 13:01:05 crc kubenswrapper[4921]: I1210 13:01:05.232991 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Dec 10 13:01:05 crc kubenswrapper[4921]: I1210 13:01:05.439825 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Dec 10 13:01:05 crc kubenswrapper[4921]: I1210 13:01:05.735076 4921 generic.go:334] "Generic (PLEG): container finished" podID="9ecd670a-6fc9-4f7c-9394-0a2fe5590147" containerID="983360af8a17941543dfcf5d115c780907817f1b29665d0a59642775d5a7c10d" exitCode=0 Dec 10 13:01:05 crc kubenswrapper[4921]: I1210 13:01:05.735129 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29422860-gd57j" event={"ID":"9ecd670a-6fc9-4f7c-9394-0a2fe5590147","Type":"ContainerDied","Data":"983360af8a17941543dfcf5d115c780907817f1b29665d0a59642775d5a7c10d"} Dec 10 13:01:05 crc kubenswrapper[4921]: I1210 13:01:05.735162 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29422860-gd57j" event={"ID":"9ecd670a-6fc9-4f7c-9394-0a2fe5590147","Type":"ContainerStarted","Data":"69044198b035563a3e12e3a20421a018b80a0807dda37bdf5cbd9e16b37d0e4e"} Dec 10 13:01:05 crc kubenswrapper[4921]: I1210 13:01:05.762250 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-hs22j"] Dec 10 13:01:05 crc kubenswrapper[4921]: I1210 13:01:05.763840 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-hs22j" Dec 10 13:01:05 crc kubenswrapper[4921]: I1210 13:01:05.765798 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 10 13:01:05 crc kubenswrapper[4921]: I1210 13:01:05.765894 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-hs22j"] Dec 10 13:01:05 crc kubenswrapper[4921]: I1210 13:01:05.892649 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ftd55\" (UniqueName: \"kubernetes.io/projected/20b7355a-9d7b-46c3-be68-346748a6d5d7-kube-api-access-ftd55\") pod \"redhat-marketplace-hs22j\" (UID: \"20b7355a-9d7b-46c3-be68-346748a6d5d7\") " pod="openshift-marketplace/redhat-marketplace-hs22j" Dec 10 13:01:05 crc kubenswrapper[4921]: I1210 13:01:05.893244 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/20b7355a-9d7b-46c3-be68-346748a6d5d7-utilities\") pod \"redhat-marketplace-hs22j\" (UID: \"20b7355a-9d7b-46c3-be68-346748a6d5d7\") " pod="openshift-marketplace/redhat-marketplace-hs22j" Dec 10 13:01:05 crc kubenswrapper[4921]: I1210 13:01:05.893309 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/20b7355a-9d7b-46c3-be68-346748a6d5d7-catalog-content\") pod \"redhat-marketplace-hs22j\" (UID: \"20b7355a-9d7b-46c3-be68-346748a6d5d7\") " pod="openshift-marketplace/redhat-marketplace-hs22j" Dec 10 13:01:05 crc kubenswrapper[4921]: I1210 13:01:05.969544 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-jc7wv"] Dec 10 13:01:05 crc kubenswrapper[4921]: I1210 13:01:05.970446 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-jc7wv" Dec 10 13:01:05 crc kubenswrapper[4921]: I1210 13:01:05.972853 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 10 13:01:05 crc kubenswrapper[4921]: I1210 13:01:05.986818 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-jc7wv"] Dec 10 13:01:05 crc kubenswrapper[4921]: I1210 13:01:05.994040 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4d637dcb-0378-4034-84f1-8e92718180c3-catalog-content\") pod \"redhat-operators-jc7wv\" (UID: \"4d637dcb-0378-4034-84f1-8e92718180c3\") " pod="openshift-marketplace/redhat-operators-jc7wv" Dec 10 13:01:05 crc kubenswrapper[4921]: I1210 13:01:05.994094 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ftd55\" (UniqueName: \"kubernetes.io/projected/20b7355a-9d7b-46c3-be68-346748a6d5d7-kube-api-access-ftd55\") pod \"redhat-marketplace-hs22j\" (UID: \"20b7355a-9d7b-46c3-be68-346748a6d5d7\") " pod="openshift-marketplace/redhat-marketplace-hs22j" Dec 10 13:01:05 crc kubenswrapper[4921]: I1210 13:01:05.994146 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hjppp\" (UniqueName: \"kubernetes.io/projected/4d637dcb-0378-4034-84f1-8e92718180c3-kube-api-access-hjppp\") pod \"redhat-operators-jc7wv\" (UID: \"4d637dcb-0378-4034-84f1-8e92718180c3\") " pod="openshift-marketplace/redhat-operators-jc7wv" Dec 10 13:01:05 crc kubenswrapper[4921]: I1210 13:01:05.994184 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/20b7355a-9d7b-46c3-be68-346748a6d5d7-utilities\") pod \"redhat-marketplace-hs22j\" (UID: \"20b7355a-9d7b-46c3-be68-346748a6d5d7\") " pod="openshift-marketplace/redhat-marketplace-hs22j" Dec 10 13:01:05 crc kubenswrapper[4921]: I1210 13:01:05.994212 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/20b7355a-9d7b-46c3-be68-346748a6d5d7-catalog-content\") pod \"redhat-marketplace-hs22j\" (UID: \"20b7355a-9d7b-46c3-be68-346748a6d5d7\") " pod="openshift-marketplace/redhat-marketplace-hs22j" Dec 10 13:01:05 crc kubenswrapper[4921]: I1210 13:01:05.994247 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4d637dcb-0378-4034-84f1-8e92718180c3-utilities\") pod \"redhat-operators-jc7wv\" (UID: \"4d637dcb-0378-4034-84f1-8e92718180c3\") " pod="openshift-marketplace/redhat-operators-jc7wv" Dec 10 13:01:05 crc kubenswrapper[4921]: I1210 13:01:05.994687 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/20b7355a-9d7b-46c3-be68-346748a6d5d7-utilities\") pod \"redhat-marketplace-hs22j\" (UID: \"20b7355a-9d7b-46c3-be68-346748a6d5d7\") " pod="openshift-marketplace/redhat-marketplace-hs22j" Dec 10 13:01:05 crc kubenswrapper[4921]: I1210 13:01:05.994752 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/20b7355a-9d7b-46c3-be68-346748a6d5d7-catalog-content\") pod \"redhat-marketplace-hs22j\" (UID: 
\"20b7355a-9d7b-46c3-be68-346748a6d5d7\") " pod="openshift-marketplace/redhat-marketplace-hs22j" Dec 10 13:01:06 crc kubenswrapper[4921]: I1210 13:01:06.026787 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ftd55\" (UniqueName: \"kubernetes.io/projected/20b7355a-9d7b-46c3-be68-346748a6d5d7-kube-api-access-ftd55\") pod \"redhat-marketplace-hs22j\" (UID: \"20b7355a-9d7b-46c3-be68-346748a6d5d7\") " pod="openshift-marketplace/redhat-marketplace-hs22j" Dec 10 13:01:06 crc kubenswrapper[4921]: I1210 13:01:06.078122 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-hs22j" Dec 10 13:01:06 crc kubenswrapper[4921]: I1210 13:01:06.094708 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hjppp\" (UniqueName: \"kubernetes.io/projected/4d637dcb-0378-4034-84f1-8e92718180c3-kube-api-access-hjppp\") pod \"redhat-operators-jc7wv\" (UID: \"4d637dcb-0378-4034-84f1-8e92718180c3\") " pod="openshift-marketplace/redhat-operators-jc7wv" Dec 10 13:01:06 crc kubenswrapper[4921]: I1210 13:01:06.094809 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4d637dcb-0378-4034-84f1-8e92718180c3-utilities\") pod \"redhat-operators-jc7wv\" (UID: \"4d637dcb-0378-4034-84f1-8e92718180c3\") " pod="openshift-marketplace/redhat-operators-jc7wv" Dec 10 13:01:06 crc kubenswrapper[4921]: I1210 13:01:06.094848 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4d637dcb-0378-4034-84f1-8e92718180c3-catalog-content\") pod \"redhat-operators-jc7wv\" (UID: \"4d637dcb-0378-4034-84f1-8e92718180c3\") " pod="openshift-marketplace/redhat-operators-jc7wv" Dec 10 13:01:06 crc kubenswrapper[4921]: I1210 13:01:06.095258 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4d637dcb-0378-4034-84f1-8e92718180c3-catalog-content\") pod \"redhat-operators-jc7wv\" (UID: \"4d637dcb-0378-4034-84f1-8e92718180c3\") " pod="openshift-marketplace/redhat-operators-jc7wv" Dec 10 13:01:06 crc kubenswrapper[4921]: I1210 13:01:06.095647 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4d637dcb-0378-4034-84f1-8e92718180c3-utilities\") pod \"redhat-operators-jc7wv\" (UID: \"4d637dcb-0378-4034-84f1-8e92718180c3\") " pod="openshift-marketplace/redhat-operators-jc7wv" Dec 10 13:01:06 crc kubenswrapper[4921]: I1210 13:01:06.119611 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hjppp\" (UniqueName: \"kubernetes.io/projected/4d637dcb-0378-4034-84f1-8e92718180c3-kube-api-access-hjppp\") pod \"redhat-operators-jc7wv\" (UID: \"4d637dcb-0378-4034-84f1-8e92718180c3\") " pod="openshift-marketplace/redhat-operators-jc7wv" Dec 10 13:01:06 crc kubenswrapper[4921]: I1210 13:01:06.287880 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-jc7wv" Dec 10 13:01:06 crc kubenswrapper[4921]: I1210 13:01:06.501771 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-hs22j"] Dec 10 13:01:06 crc kubenswrapper[4921]: W1210 13:01:06.508874 4921 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod20b7355a_9d7b_46c3_be68_346748a6d5d7.slice/crio-4960cebfc516e2906558865d69e34a1ebb10b66be6f231f2430b02784e6093ee WatchSource:0}: Error finding container 4960cebfc516e2906558865d69e34a1ebb10b66be6f231f2430b02784e6093ee: Status 404 returned error can't find the container with id 4960cebfc516e2906558865d69e34a1ebb10b66be6f231f2430b02784e6093ee Dec 10 13:01:06 crc kubenswrapper[4921]: I1210 13:01:06.686778 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-jc7wv"] Dec 10 13:01:06 crc kubenswrapper[4921]: W1210 13:01:06.718010 4921 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4d637dcb_0378_4034_84f1_8e92718180c3.slice/crio-5975520f0c128a90d6b38bc53c2f0ff7b08e66ece29b6753b28c89d8131f555f WatchSource:0}: Error finding container 5975520f0c128a90d6b38bc53c2f0ff7b08e66ece29b6753b28c89d8131f555f: Status 404 returned error can't find the container with id 5975520f0c128a90d6b38bc53c2f0ff7b08e66ece29b6753b28c89d8131f555f Dec 10 13:01:06 crc kubenswrapper[4921]: I1210 13:01:06.753520 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hs22j" event={"ID":"20b7355a-9d7b-46c3-be68-346748a6d5d7","Type":"ContainerDied","Data":"04efe98be1c22719caec5829eb93e7711845cece527f8c5561a333c2d5eef736"} Dec 10 13:01:06 crc kubenswrapper[4921]: I1210 13:01:06.755664 4921 generic.go:334] "Generic (PLEG): container finished" podID="20b7355a-9d7b-46c3-be68-346748a6d5d7" containerID="04efe98be1c22719caec5829eb93e7711845cece527f8c5561a333c2d5eef736" exitCode=0 Dec 10 13:01:06 crc kubenswrapper[4921]: I1210 13:01:06.755873 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hs22j" event={"ID":"20b7355a-9d7b-46c3-be68-346748a6d5d7","Type":"ContainerStarted","Data":"4960cebfc516e2906558865d69e34a1ebb10b66be6f231f2430b02784e6093ee"} Dec 10 13:01:06 crc kubenswrapper[4921]: I1210 13:01:06.766792 4921 generic.go:334] "Generic (PLEG): container finished" podID="02c8bc85-0cb6-49fb-8c84-625acc7de573" containerID="22f16ccc2658e514872fc78d57e530212c960da5579342cc8fe6b368eb36ddcb" exitCode=0 Dec 10 13:01:06 crc kubenswrapper[4921]: I1210 13:01:06.766889 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s2x6r" event={"ID":"02c8bc85-0cb6-49fb-8c84-625acc7de573","Type":"ContainerDied","Data":"22f16ccc2658e514872fc78d57e530212c960da5579342cc8fe6b368eb36ddcb"} Dec 10 13:01:06 crc kubenswrapper[4921]: I1210 13:01:06.768901 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jc7wv" event={"ID":"4d637dcb-0378-4034-84f1-8e92718180c3","Type":"ContainerStarted","Data":"5975520f0c128a90d6b38bc53c2f0ff7b08e66ece29b6753b28c89d8131f555f"} Dec 10 13:01:07 crc kubenswrapper[4921]: I1210 13:01:07.067263 4921 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29422860-gd57j" Dec 10 13:01:07 crc kubenswrapper[4921]: I1210 13:01:07.220804 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-grdhp\" (UniqueName: \"kubernetes.io/projected/9ecd670a-6fc9-4f7c-9394-0a2fe5590147-kube-api-access-grdhp\") pod \"9ecd670a-6fc9-4f7c-9394-0a2fe5590147\" (UID: \"9ecd670a-6fc9-4f7c-9394-0a2fe5590147\") " Dec 10 13:01:07 crc kubenswrapper[4921]: I1210 13:01:07.221285 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9ecd670a-6fc9-4f7c-9394-0a2fe5590147-secret-volume\") pod \"9ecd670a-6fc9-4f7c-9394-0a2fe5590147\" (UID: \"9ecd670a-6fc9-4f7c-9394-0a2fe5590147\") " Dec 10 13:01:07 crc kubenswrapper[4921]: I1210 13:01:07.221344 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9ecd670a-6fc9-4f7c-9394-0a2fe5590147-config-volume\") pod \"9ecd670a-6fc9-4f7c-9394-0a2fe5590147\" (UID: \"9ecd670a-6fc9-4f7c-9394-0a2fe5590147\") " Dec 10 13:01:07 crc kubenswrapper[4921]: I1210 13:01:07.222196 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9ecd670a-6fc9-4f7c-9394-0a2fe5590147-config-volume" (OuterVolumeSpecName: "config-volume") pod "9ecd670a-6fc9-4f7c-9394-0a2fe5590147" (UID: "9ecd670a-6fc9-4f7c-9394-0a2fe5590147"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 13:01:07 crc kubenswrapper[4921]: I1210 13:01:07.228556 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9ecd670a-6fc9-4f7c-9394-0a2fe5590147-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "9ecd670a-6fc9-4f7c-9394-0a2fe5590147" (UID: "9ecd670a-6fc9-4f7c-9394-0a2fe5590147"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:01:07 crc kubenswrapper[4921]: I1210 13:01:07.228584 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9ecd670a-6fc9-4f7c-9394-0a2fe5590147-kube-api-access-grdhp" (OuterVolumeSpecName: "kube-api-access-grdhp") pod "9ecd670a-6fc9-4f7c-9394-0a2fe5590147" (UID: "9ecd670a-6fc9-4f7c-9394-0a2fe5590147"). InnerVolumeSpecName "kube-api-access-grdhp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 13:01:07 crc kubenswrapper[4921]: I1210 13:01:07.322111 4921 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9ecd670a-6fc9-4f7c-9394-0a2fe5590147-config-volume\") on node \"crc\" DevicePath \"\"" Dec 10 13:01:07 crc kubenswrapper[4921]: I1210 13:01:07.322147 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-grdhp\" (UniqueName: \"kubernetes.io/projected/9ecd670a-6fc9-4f7c-9394-0a2fe5590147-kube-api-access-grdhp\") on node \"crc\" DevicePath \"\"" Dec 10 13:01:07 crc kubenswrapper[4921]: I1210 13:01:07.322161 4921 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9ecd670a-6fc9-4f7c-9394-0a2fe5590147-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 10 13:01:07 crc kubenswrapper[4921]: I1210 13:01:07.778506 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s2x6r" event={"ID":"02c8bc85-0cb6-49fb-8c84-625acc7de573","Type":"ContainerStarted","Data":"b33a9fba470582a105bc126dd70e60ea266c8e5b68211e43933cba4ede59cb22"} Dec 10 13:01:07 crc kubenswrapper[4921]: I1210 13:01:07.780287 4921 generic.go:334] "Generic (PLEG): container finished" podID="4d637dcb-0378-4034-84f1-8e92718180c3" containerID="0a737e8c32e561d149dab315d9020d07f8c05320c866a55c19976bce68323c74" exitCode=0 Dec 10 13:01:07 crc kubenswrapper[4921]: I1210 13:01:07.780378 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jc7wv" event={"ID":"4d637dcb-0378-4034-84f1-8e92718180c3","Type":"ContainerDied","Data":"0a737e8c32e561d149dab315d9020d07f8c05320c866a55c19976bce68323c74"} Dec 10 13:01:07 crc kubenswrapper[4921]: I1210 13:01:07.783470 4921 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29422860-gd57j" Dec 10 13:01:07 crc kubenswrapper[4921]: I1210 13:01:07.783481 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29422860-gd57j" event={"ID":"9ecd670a-6fc9-4f7c-9394-0a2fe5590147","Type":"ContainerDied","Data":"69044198b035563a3e12e3a20421a018b80a0807dda37bdf5cbd9e16b37d0e4e"} Dec 10 13:01:07 crc kubenswrapper[4921]: I1210 13:01:07.783534 4921 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="69044198b035563a3e12e3a20421a018b80a0807dda37bdf5cbd9e16b37d0e4e" Dec 10 13:01:07 crc kubenswrapper[4921]: I1210 13:01:07.785952 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/1.log" Dec 10 13:01:07 crc kubenswrapper[4921]: I1210 13:01:07.791815 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Dec 10 13:01:07 crc kubenswrapper[4921]: I1210 13:01:07.791879 4921 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="ac792ce44743bf4c65763d672a9e77eaaef6d7b2e9377ab7f71f9c051b69c2bc" exitCode=137 Dec 10 13:01:07 crc kubenswrapper[4921]: I1210 13:01:07.791925 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"ac792ce44743bf4c65763d672a9e77eaaef6d7b2e9377ab7f71f9c051b69c2bc"} Dec 10 13:01:07 crc kubenswrapper[4921]: I1210 13:01:07.791958 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"723d1cb5bb34ddc2f9c9d9e414371869bf6a510897fa1dd7419395e5bca11e9b"} Dec 10 13:01:07 crc kubenswrapper[4921]: I1210 13:01:07.791982 4921 scope.go:117] "RemoveContainer" containerID="5a4716beddbcd24e8418830aa5494cffffc21272e45e30bd15cfe58bfc07c543" Dec 10 13:01:07 crc kubenswrapper[4921]: I1210 13:01:07.809692 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-s2x6r" podStartSLOduration=2.331524576 podStartE2EDuration="4.809671443s" podCreationTimestamp="2025-12-10 13:01:03 +0000 UTC" firstStartedPulling="2025-12-10 13:01:04.731415193 +0000 UTC m=+261.947637127" lastFinishedPulling="2025-12-10 13:01:07.20956207 +0000 UTC m=+264.425783994" observedRunningTime="2025-12-10 13:01:07.808015349 +0000 UTC m=+265.024237283" watchObservedRunningTime="2025-12-10 13:01:07.809671443 +0000 UTC m=+265.025893377" Dec 10 13:01:07 crc kubenswrapper[4921]: I1210 13:01:07.962096 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Dec 10 13:01:08 crc kubenswrapper[4921]: I1210 13:01:08.800084 4921 generic.go:334] "Generic (PLEG): container finished" podID="20b7355a-9d7b-46c3-be68-346748a6d5d7" containerID="e6c74e736d5b186d220487da942ee482ff52a69b71b3094ed719ede149538278" exitCode=0 Dec 10 13:01:08 crc kubenswrapper[4921]: I1210 13:01:08.800131 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/redhat-marketplace-hs22j" event={"ID":"20b7355a-9d7b-46c3-be68-346748a6d5d7","Type":"ContainerDied","Data":"e6c74e736d5b186d220487da942ee482ff52a69b71b3094ed719ede149538278"} Dec 10 13:01:08 crc kubenswrapper[4921]: I1210 13:01:08.804004 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/1.log" Dec 10 13:01:09 crc kubenswrapper[4921]: I1210 13:01:09.811252 4921 generic.go:334] "Generic (PLEG): container finished" podID="4d637dcb-0378-4034-84f1-8e92718180c3" containerID="314ac163d9bfe6ae7e0d6dc2d212f2945197199b1e28f0f53e9c1ffd9db4ac6b" exitCode=0 Dec 10 13:01:09 crc kubenswrapper[4921]: I1210 13:01:09.811322 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jc7wv" event={"ID":"4d637dcb-0378-4034-84f1-8e92718180c3","Type":"ContainerDied","Data":"314ac163d9bfe6ae7e0d6dc2d212f2945197199b1e28f0f53e9c1ffd9db4ac6b"} Dec 10 13:01:10 crc kubenswrapper[4921]: I1210 13:01:10.818487 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jc7wv" event={"ID":"4d637dcb-0378-4034-84f1-8e92718180c3","Type":"ContainerStarted","Data":"e68bd6ff0d63937192c214179aee57fd985c76f65288ff91f02633b3495b8821"} Dec 10 13:01:11 crc kubenswrapper[4921]: I1210 13:01:11.762775 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-jc7wv" podStartSLOduration=4.289876512 podStartE2EDuration="6.762757116s" podCreationTimestamp="2025-12-10 13:01:05 +0000 UTC" firstStartedPulling="2025-12-10 13:01:07.787614578 +0000 UTC m=+265.003836512" lastFinishedPulling="2025-12-10 13:01:10.260495182 +0000 UTC m=+267.476717116" observedRunningTime="2025-12-10 13:01:10.843947347 +0000 UTC m=+268.060169271" watchObservedRunningTime="2025-12-10 13:01:11.762757116 +0000 UTC m=+268.978979040" Dec 10 13:01:11 crc kubenswrapper[4921]: I1210 13:01:11.763350 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-5gwtb"] Dec 10 13:01:11 crc kubenswrapper[4921]: E1210 13:01:11.763606 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9ecd670a-6fc9-4f7c-9394-0a2fe5590147" containerName="collect-profiles" Dec 10 13:01:11 crc kubenswrapper[4921]: I1210 13:01:11.763623 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="9ecd670a-6fc9-4f7c-9394-0a2fe5590147" containerName="collect-profiles" Dec 10 13:01:11 crc kubenswrapper[4921]: I1210 13:01:11.763723 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="9ecd670a-6fc9-4f7c-9394-0a2fe5590147" containerName="collect-profiles" Dec 10 13:01:11 crc kubenswrapper[4921]: I1210 13:01:11.764467 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-5gwtb" Dec 10 13:01:11 crc kubenswrapper[4921]: I1210 13:01:11.767615 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 10 13:01:11 crc kubenswrapper[4921]: I1210 13:01:11.778023 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-5gwtb"] Dec 10 13:01:11 crc kubenswrapper[4921]: I1210 13:01:11.781769 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/26640aa3-6e2c-4c26-a797-ebcc8cbdbdec-utilities\") pod \"community-operators-5gwtb\" (UID: \"26640aa3-6e2c-4c26-a797-ebcc8cbdbdec\") " pod="openshift-marketplace/community-operators-5gwtb" Dec 10 13:01:11 crc kubenswrapper[4921]: I1210 13:01:11.781829 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5vvjg\" (UniqueName: \"kubernetes.io/projected/26640aa3-6e2c-4c26-a797-ebcc8cbdbdec-kube-api-access-5vvjg\") pod \"community-operators-5gwtb\" (UID: \"26640aa3-6e2c-4c26-a797-ebcc8cbdbdec\") " pod="openshift-marketplace/community-operators-5gwtb" Dec 10 13:01:11 crc kubenswrapper[4921]: I1210 13:01:11.781973 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/26640aa3-6e2c-4c26-a797-ebcc8cbdbdec-catalog-content\") pod \"community-operators-5gwtb\" (UID: \"26640aa3-6e2c-4c26-a797-ebcc8cbdbdec\") " pod="openshift-marketplace/community-operators-5gwtb" Dec 10 13:01:11 crc kubenswrapper[4921]: I1210 13:01:11.825255 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hs22j" event={"ID":"20b7355a-9d7b-46c3-be68-346748a6d5d7","Type":"ContainerStarted","Data":"d94b9bc7d30b9096493f621ace9793dcaf5d30ca5148a541912e4241bf143663"} Dec 10 13:01:11 crc kubenswrapper[4921]: I1210 13:01:11.846452 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Dec 10 13:01:11 crc kubenswrapper[4921]: I1210 13:01:11.882433 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/26640aa3-6e2c-4c26-a797-ebcc8cbdbdec-utilities\") pod \"community-operators-5gwtb\" (UID: \"26640aa3-6e2c-4c26-a797-ebcc8cbdbdec\") " pod="openshift-marketplace/community-operators-5gwtb" Dec 10 13:01:11 crc kubenswrapper[4921]: I1210 13:01:11.882488 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5vvjg\" (UniqueName: \"kubernetes.io/projected/26640aa3-6e2c-4c26-a797-ebcc8cbdbdec-kube-api-access-5vvjg\") pod \"community-operators-5gwtb\" (UID: \"26640aa3-6e2c-4c26-a797-ebcc8cbdbdec\") " pod="openshift-marketplace/community-operators-5gwtb" Dec 10 13:01:11 crc kubenswrapper[4921]: I1210 13:01:11.882532 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/26640aa3-6e2c-4c26-a797-ebcc8cbdbdec-catalog-content\") pod \"community-operators-5gwtb\" (UID: \"26640aa3-6e2c-4c26-a797-ebcc8cbdbdec\") " pod="openshift-marketplace/community-operators-5gwtb" Dec 10 13:01:11 crc kubenswrapper[4921]: I1210 13:01:11.882955 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/26640aa3-6e2c-4c26-a797-ebcc8cbdbdec-catalog-content\") pod \"community-operators-5gwtb\" (UID: \"26640aa3-6e2c-4c26-a797-ebcc8cbdbdec\") " pod="openshift-marketplace/community-operators-5gwtb" Dec 10 13:01:11 crc kubenswrapper[4921]: I1210 13:01:11.883061 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/26640aa3-6e2c-4c26-a797-ebcc8cbdbdec-utilities\") pod \"community-operators-5gwtb\" (UID: \"26640aa3-6e2c-4c26-a797-ebcc8cbdbdec\") " pod="openshift-marketplace/community-operators-5gwtb" Dec 10 13:01:11 crc kubenswrapper[4921]: I1210 13:01:11.914474 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5vvjg\" (UniqueName: \"kubernetes.io/projected/26640aa3-6e2c-4c26-a797-ebcc8cbdbdec-kube-api-access-5vvjg\") pod \"community-operators-5gwtb\" (UID: \"26640aa3-6e2c-4c26-a797-ebcc8cbdbdec\") " pod="openshift-marketplace/community-operators-5gwtb" Dec 10 13:01:12 crc kubenswrapper[4921]: I1210 13:01:12.079791 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-5gwtb" Dec 10 13:01:12 crc kubenswrapper[4921]: I1210 13:01:12.316039 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-hs22j" podStartSLOduration=3.041202926 podStartE2EDuration="7.316017696s" podCreationTimestamp="2025-12-10 13:01:05 +0000 UTC" firstStartedPulling="2025-12-10 13:01:06.759042914 +0000 UTC m=+263.975264838" lastFinishedPulling="2025-12-10 13:01:11.033857684 +0000 UTC m=+268.250079608" observedRunningTime="2025-12-10 13:01:11.849032036 +0000 UTC m=+269.065253960" watchObservedRunningTime="2025-12-10 13:01:12.316017696 +0000 UTC m=+269.532239640" Dec 10 13:01:12 crc kubenswrapper[4921]: I1210 13:01:12.321328 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-5gwtb"] Dec 10 13:01:12 crc kubenswrapper[4921]: W1210 13:01:12.324922 4921 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod26640aa3_6e2c_4c26_a797_ebcc8cbdbdec.slice/crio-55a478dc436a574dc4707a4cabed083674f1ecf9aaa22f9ed6a743317cbbb3d3 WatchSource:0}: Error finding container 55a478dc436a574dc4707a4cabed083674f1ecf9aaa22f9ed6a743317cbbb3d3: Status 404 returned error can't find the container with id 55a478dc436a574dc4707a4cabed083674f1ecf9aaa22f9ed6a743317cbbb3d3 Dec 10 13:01:12 crc kubenswrapper[4921]: I1210 13:01:12.831581 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5gwtb" event={"ID":"26640aa3-6e2c-4c26-a797-ebcc8cbdbdec","Type":"ContainerStarted","Data":"55a478dc436a574dc4707a4cabed083674f1ecf9aaa22f9ed6a743317cbbb3d3"} Dec 10 13:01:13 crc kubenswrapper[4921]: I1210 13:01:13.170591 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 10 13:01:13 crc kubenswrapper[4921]: I1210 13:01:13.591710 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Dec 10 13:01:13 crc kubenswrapper[4921]: I1210 13:01:13.836994 4921 generic.go:334] "Generic (PLEG): container finished" podID="26640aa3-6e2c-4c26-a797-ebcc8cbdbdec" containerID="bf7e71cb031813777f9081dbfff450ff0ff1dd75642210128b5936f72e01dc8e" exitCode=0 Dec 10 13:01:13 crc kubenswrapper[4921]: I1210 13:01:13.837045 4921 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5gwtb" event={"ID":"26640aa3-6e2c-4c26-a797-ebcc8cbdbdec","Type":"ContainerDied","Data":"bf7e71cb031813777f9081dbfff450ff0ff1dd75642210128b5936f72e01dc8e"} Dec 10 13:01:13 crc kubenswrapper[4921]: I1210 13:01:13.886656 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-s2x6r" Dec 10 13:01:13 crc kubenswrapper[4921]: I1210 13:01:13.886698 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-s2x6r" Dec 10 13:01:13 crc kubenswrapper[4921]: I1210 13:01:13.928030 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-s2x6r" Dec 10 13:01:14 crc kubenswrapper[4921]: I1210 13:01:14.323146 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Dec 10 13:01:14 crc kubenswrapper[4921]: I1210 13:01:14.888903 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-s2x6r" Dec 10 13:01:16 crc kubenswrapper[4921]: I1210 13:01:16.078368 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-hs22j" Dec 10 13:01:16 crc kubenswrapper[4921]: I1210 13:01:16.078792 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-hs22j" Dec 10 13:01:16 crc kubenswrapper[4921]: I1210 13:01:16.115697 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-hs22j" Dec 10 13:01:16 crc kubenswrapper[4921]: I1210 13:01:16.288481 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-jc7wv" Dec 10 13:01:16 crc kubenswrapper[4921]: I1210 13:01:16.288543 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-jc7wv" Dec 10 13:01:16 crc kubenswrapper[4921]: I1210 13:01:16.328172 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-jc7wv" Dec 10 13:01:16 crc kubenswrapper[4921]: I1210 13:01:16.463223 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 10 13:01:16 crc kubenswrapper[4921]: I1210 13:01:16.739308 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 10 13:01:16 crc kubenswrapper[4921]: I1210 13:01:16.742833 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 10 13:01:16 crc kubenswrapper[4921]: I1210 13:01:16.885854 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-hs22j" Dec 10 13:01:17 crc kubenswrapper[4921]: I1210 13:01:17.258894 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-jc7wv" Dec 10 13:01:17 crc kubenswrapper[4921]: I1210 13:01:17.859411 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 10 13:01:18 crc kubenswrapper[4921]: I1210 
Dec 10 13:01:18 crc kubenswrapper[4921]: I1210 13:01:18.864522 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5gwtb" event={"ID":"26640aa3-6e2c-4c26-a797-ebcc8cbdbdec","Type":"ContainerDied","Data":"e20efa1c761f2014c950fda35012625d7d054dc78dc565ffa3d90e02c669085f"}
Dec 10 13:01:20 crc kubenswrapper[4921]: I1210 13:01:20.896156 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5gwtb" event={"ID":"26640aa3-6e2c-4c26-a797-ebcc8cbdbdec","Type":"ContainerStarted","Data":"0356fa90932a5d73d363a0c9fb4c3e492a37bbc8df68c88d0201906f36971fd7"}
Dec 10 13:01:20 crc kubenswrapper[4921]: I1210 13:01:20.913554 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-5gwtb" podStartSLOduration=3.970389951 podStartE2EDuration="9.913530969s" podCreationTimestamp="2025-12-10 13:01:11 +0000 UTC" firstStartedPulling="2025-12-10 13:01:13.839313359 +0000 UTC m=+271.055535293" lastFinishedPulling="2025-12-10 13:01:19.782454367 +0000 UTC m=+276.998676311" observedRunningTime="2025-12-10 13:01:20.912862741 +0000 UTC m=+278.129084675" watchObservedRunningTime="2025-12-10 13:01:20.913530969 +0000 UTC m=+278.129752883"
Dec 10 13:01:22 crc kubenswrapper[4921]: I1210 13:01:22.080070 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-5gwtb"
Dec 10 13:01:22 crc kubenswrapper[4921]: I1210 13:01:22.080844 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-5gwtb"
Dec 10 13:01:23 crc kubenswrapper[4921]: I1210 13:01:23.122516 4921 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-5gwtb" podUID="26640aa3-6e2c-4c26-a797-ebcc8cbdbdec" containerName="registry-server" probeResult="failure" output=<
Dec 10 13:01:23 crc kubenswrapper[4921]: timeout: failed to connect service ":50051" within 1s
Dec 10 13:01:23 crc kubenswrapper[4921]: >
Dec 10 13:01:25 crc kubenswrapper[4921]: I1210 13:01:25.457846 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-55m5g"]
Dec 10 13:01:25 crc kubenswrapper[4921]: I1210 13:01:25.458812 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-55m5g" podUID="03c59650-42ea-4995-ada6-f86eb8aed1de" containerName="route-controller-manager" containerID="cri-o://5e2fd9f465095a797730b4e59276447b63b2643244e3aff28570797a377679f5" gracePeriod=30
Dec 10 13:01:25 crc kubenswrapper[4921]: I1210 13:01:25.462030 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-mtf22"]
Dec 10 13:01:25 crc kubenswrapper[4921]: I1210 13:01:25.462271 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-mtf22" podUID="2e786864-e8b1-4a03-9327-14d389a5bc21" containerName="controller-manager" containerID="cri-o://9d64276a674c6a2d99dfe9285c7d9e37d416852678303c6c1bd375d233ba3cbd" gracePeriod=30
Dec 10 13:01:25 crc kubenswrapper[4921]: I1210 13:01:25.933558 4921 
generic.go:334] "Generic (PLEG): container finished" podID="2e786864-e8b1-4a03-9327-14d389a5bc21" containerID="9d64276a674c6a2d99dfe9285c7d9e37d416852678303c6c1bd375d233ba3cbd" exitCode=0 Dec 10 13:01:25 crc kubenswrapper[4921]: I1210 13:01:25.933638 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-mtf22" event={"ID":"2e786864-e8b1-4a03-9327-14d389a5bc21","Type":"ContainerDied","Data":"9d64276a674c6a2d99dfe9285c7d9e37d416852678303c6c1bd375d233ba3cbd"} Dec 10 13:01:25 crc kubenswrapper[4921]: I1210 13:01:25.935465 4921 generic.go:334] "Generic (PLEG): container finished" podID="03c59650-42ea-4995-ada6-f86eb8aed1de" containerID="5e2fd9f465095a797730b4e59276447b63b2643244e3aff28570797a377679f5" exitCode=0 Dec 10 13:01:25 crc kubenswrapper[4921]: I1210 13:01:25.935503 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-55m5g" event={"ID":"03c59650-42ea-4995-ada6-f86eb8aed1de","Type":"ContainerDied","Data":"5e2fd9f465095a797730b4e59276447b63b2643244e3aff28570797a377679f5"} Dec 10 13:01:26 crc kubenswrapper[4921]: I1210 13:01:26.359574 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-mtf22" Dec 10 13:01:26 crc kubenswrapper[4921]: I1210 13:01:26.372486 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/2e786864-e8b1-4a03-9327-14d389a5bc21-proxy-ca-bundles\") pod \"2e786864-e8b1-4a03-9327-14d389a5bc21\" (UID: \"2e786864-e8b1-4a03-9327-14d389a5bc21\") " Dec 10 13:01:26 crc kubenswrapper[4921]: I1210 13:01:26.372545 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2e786864-e8b1-4a03-9327-14d389a5bc21-config\") pod \"2e786864-e8b1-4a03-9327-14d389a5bc21\" (UID: \"2e786864-e8b1-4a03-9327-14d389a5bc21\") " Dec 10 13:01:26 crc kubenswrapper[4921]: I1210 13:01:26.372634 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-62qtp\" (UniqueName: \"kubernetes.io/projected/2e786864-e8b1-4a03-9327-14d389a5bc21-kube-api-access-62qtp\") pod \"2e786864-e8b1-4a03-9327-14d389a5bc21\" (UID: \"2e786864-e8b1-4a03-9327-14d389a5bc21\") " Dec 10 13:01:26 crc kubenswrapper[4921]: I1210 13:01:26.372696 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/2e786864-e8b1-4a03-9327-14d389a5bc21-client-ca\") pod \"2e786864-e8b1-4a03-9327-14d389a5bc21\" (UID: \"2e786864-e8b1-4a03-9327-14d389a5bc21\") " Dec 10 13:01:26 crc kubenswrapper[4921]: I1210 13:01:26.372730 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2e786864-e8b1-4a03-9327-14d389a5bc21-serving-cert\") pod \"2e786864-e8b1-4a03-9327-14d389a5bc21\" (UID: \"2e786864-e8b1-4a03-9327-14d389a5bc21\") " Dec 10 13:01:26 crc kubenswrapper[4921]: I1210 13:01:26.373625 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2e786864-e8b1-4a03-9327-14d389a5bc21-client-ca" (OuterVolumeSpecName: "client-ca") pod "2e786864-e8b1-4a03-9327-14d389a5bc21" (UID: "2e786864-e8b1-4a03-9327-14d389a5bc21"). InnerVolumeSpecName "client-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 13:01:26 crc kubenswrapper[4921]: I1210 13:01:26.373868 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2e786864-e8b1-4a03-9327-14d389a5bc21-config" (OuterVolumeSpecName: "config") pod "2e786864-e8b1-4a03-9327-14d389a5bc21" (UID: "2e786864-e8b1-4a03-9327-14d389a5bc21"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 13:01:26 crc kubenswrapper[4921]: I1210 13:01:26.374857 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2e786864-e8b1-4a03-9327-14d389a5bc21-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "2e786864-e8b1-4a03-9327-14d389a5bc21" (UID: "2e786864-e8b1-4a03-9327-14d389a5bc21"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 13:01:26 crc kubenswrapper[4921]: I1210 13:01:26.379070 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2e786864-e8b1-4a03-9327-14d389a5bc21-kube-api-access-62qtp" (OuterVolumeSpecName: "kube-api-access-62qtp") pod "2e786864-e8b1-4a03-9327-14d389a5bc21" (UID: "2e786864-e8b1-4a03-9327-14d389a5bc21"). InnerVolumeSpecName "kube-api-access-62qtp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 13:01:26 crc kubenswrapper[4921]: I1210 13:01:26.379130 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2e786864-e8b1-4a03-9327-14d389a5bc21-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "2e786864-e8b1-4a03-9327-14d389a5bc21" (UID: "2e786864-e8b1-4a03-9327-14d389a5bc21"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:01:26 crc kubenswrapper[4921]: I1210 13:01:26.405001 4921 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-55m5g" Dec 10 13:01:26 crc kubenswrapper[4921]: I1210 13:01:26.473372 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-csv6g\" (UniqueName: \"kubernetes.io/projected/03c59650-42ea-4995-ada6-f86eb8aed1de-kube-api-access-csv6g\") pod \"03c59650-42ea-4995-ada6-f86eb8aed1de\" (UID: \"03c59650-42ea-4995-ada6-f86eb8aed1de\") " Dec 10 13:01:26 crc kubenswrapper[4921]: I1210 13:01:26.474326 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/03c59650-42ea-4995-ada6-f86eb8aed1de-client-ca\") pod \"03c59650-42ea-4995-ada6-f86eb8aed1de\" (UID: \"03c59650-42ea-4995-ada6-f86eb8aed1de\") " Dec 10 13:01:26 crc kubenswrapper[4921]: I1210 13:01:26.474518 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/03c59650-42ea-4995-ada6-f86eb8aed1de-serving-cert\") pod \"03c59650-42ea-4995-ada6-f86eb8aed1de\" (UID: \"03c59650-42ea-4995-ada6-f86eb8aed1de\") " Dec 10 13:01:26 crc kubenswrapper[4921]: I1210 13:01:26.474662 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/03c59650-42ea-4995-ada6-f86eb8aed1de-config\") pod \"03c59650-42ea-4995-ada6-f86eb8aed1de\" (UID: \"03c59650-42ea-4995-ada6-f86eb8aed1de\") " Dec 10 13:01:26 crc kubenswrapper[4921]: I1210 13:01:26.474928 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-62qtp\" (UniqueName: \"kubernetes.io/projected/2e786864-e8b1-4a03-9327-14d389a5bc21-kube-api-access-62qtp\") on node \"crc\" DevicePath \"\"" Dec 10 13:01:26 crc kubenswrapper[4921]: I1210 13:01:26.475017 4921 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/2e786864-e8b1-4a03-9327-14d389a5bc21-client-ca\") on node \"crc\" DevicePath \"\"" Dec 10 13:01:26 crc kubenswrapper[4921]: I1210 13:01:26.475100 4921 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2e786864-e8b1-4a03-9327-14d389a5bc21-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 10 13:01:26 crc kubenswrapper[4921]: I1210 13:01:26.475192 4921 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/2e786864-e8b1-4a03-9327-14d389a5bc21-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 10 13:01:26 crc kubenswrapper[4921]: I1210 13:01:26.475303 4921 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2e786864-e8b1-4a03-9327-14d389a5bc21-config\") on node \"crc\" DevicePath \"\"" Dec 10 13:01:26 crc kubenswrapper[4921]: I1210 13:01:26.476865 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/03c59650-42ea-4995-ada6-f86eb8aed1de-client-ca" (OuterVolumeSpecName: "client-ca") pod "03c59650-42ea-4995-ada6-f86eb8aed1de" (UID: "03c59650-42ea-4995-ada6-f86eb8aed1de"). InnerVolumeSpecName "client-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 13:01:26 crc kubenswrapper[4921]: I1210 13:01:26.477507 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/03c59650-42ea-4995-ada6-f86eb8aed1de-config" (OuterVolumeSpecName: "config") pod "03c59650-42ea-4995-ada6-f86eb8aed1de" (UID: "03c59650-42ea-4995-ada6-f86eb8aed1de"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 13:01:26 crc kubenswrapper[4921]: I1210 13:01:26.480796 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/03c59650-42ea-4995-ada6-f86eb8aed1de-kube-api-access-csv6g" (OuterVolumeSpecName: "kube-api-access-csv6g") pod "03c59650-42ea-4995-ada6-f86eb8aed1de" (UID: "03c59650-42ea-4995-ada6-f86eb8aed1de"). InnerVolumeSpecName "kube-api-access-csv6g". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 13:01:26 crc kubenswrapper[4921]: I1210 13:01:26.484821 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/03c59650-42ea-4995-ada6-f86eb8aed1de-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "03c59650-42ea-4995-ada6-f86eb8aed1de" (UID: "03c59650-42ea-4995-ada6-f86eb8aed1de"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:01:26 crc kubenswrapper[4921]: I1210 13:01:26.580279 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-csv6g\" (UniqueName: \"kubernetes.io/projected/03c59650-42ea-4995-ada6-f86eb8aed1de-kube-api-access-csv6g\") on node \"crc\" DevicePath \"\"" Dec 10 13:01:26 crc kubenswrapper[4921]: I1210 13:01:26.580735 4921 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/03c59650-42ea-4995-ada6-f86eb8aed1de-client-ca\") on node \"crc\" DevicePath \"\"" Dec 10 13:01:26 crc kubenswrapper[4921]: I1210 13:01:26.580751 4921 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/03c59650-42ea-4995-ada6-f86eb8aed1de-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 10 13:01:26 crc kubenswrapper[4921]: I1210 13:01:26.580760 4921 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/03c59650-42ea-4995-ada6-f86eb8aed1de-config\") on node \"crc\" DevicePath \"\"" Dec 10 13:01:26 crc kubenswrapper[4921]: I1210 13:01:26.677448 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6d4798f878-n7s84"] Dec 10 13:01:26 crc kubenswrapper[4921]: E1210 13:01:26.677689 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03c59650-42ea-4995-ada6-f86eb8aed1de" containerName="route-controller-manager" Dec 10 13:01:26 crc kubenswrapper[4921]: I1210 13:01:26.677702 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="03c59650-42ea-4995-ada6-f86eb8aed1de" containerName="route-controller-manager" Dec 10 13:01:26 crc kubenswrapper[4921]: E1210 13:01:26.677719 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2e786864-e8b1-4a03-9327-14d389a5bc21" containerName="controller-manager" Dec 10 13:01:26 crc kubenswrapper[4921]: I1210 13:01:26.677724 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="2e786864-e8b1-4a03-9327-14d389a5bc21" containerName="controller-manager" Dec 10 13:01:26 crc kubenswrapper[4921]: I1210 13:01:26.677816 4921 memory_manager.go:354] "RemoveStaleState removing 
state" podUID="03c59650-42ea-4995-ada6-f86eb8aed1de" containerName="route-controller-manager" Dec 10 13:01:26 crc kubenswrapper[4921]: I1210 13:01:26.677824 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="2e786864-e8b1-4a03-9327-14d389a5bc21" containerName="controller-manager" Dec 10 13:01:26 crc kubenswrapper[4921]: I1210 13:01:26.678195 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6d4798f878-n7s84" Dec 10 13:01:26 crc kubenswrapper[4921]: I1210 13:01:26.681986 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-55b77bb5f6-bltk9"] Dec 10 13:01:26 crc kubenswrapper[4921]: I1210 13:01:26.682536 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/77f48071-becb-4bf7-8dea-391df35c8ace-client-ca\") pod \"route-controller-manager-6d4798f878-n7s84\" (UID: \"77f48071-becb-4bf7-8dea-391df35c8ace\") " pod="openshift-route-controller-manager/route-controller-manager-6d4798f878-n7s84" Dec 10 13:01:26 crc kubenswrapper[4921]: I1210 13:01:26.682599 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/77f48071-becb-4bf7-8dea-391df35c8ace-serving-cert\") pod \"route-controller-manager-6d4798f878-n7s84\" (UID: \"77f48071-becb-4bf7-8dea-391df35c8ace\") " pod="openshift-route-controller-manager/route-controller-manager-6d4798f878-n7s84" Dec 10 13:01:26 crc kubenswrapper[4921]: I1210 13:01:26.682630 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7fghr\" (UniqueName: \"kubernetes.io/projected/77f48071-becb-4bf7-8dea-391df35c8ace-kube-api-access-7fghr\") pod \"route-controller-manager-6d4798f878-n7s84\" (UID: \"77f48071-becb-4bf7-8dea-391df35c8ace\") " pod="openshift-route-controller-manager/route-controller-manager-6d4798f878-n7s84" Dec 10 13:01:26 crc kubenswrapper[4921]: I1210 13:01:26.682669 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/77f48071-becb-4bf7-8dea-391df35c8ace-config\") pod \"route-controller-manager-6d4798f878-n7s84\" (UID: \"77f48071-becb-4bf7-8dea-391df35c8ace\") " pod="openshift-route-controller-manager/route-controller-manager-6d4798f878-n7s84" Dec 10 13:01:26 crc kubenswrapper[4921]: I1210 13:01:26.682826 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-55b77bb5f6-bltk9" Dec 10 13:01:26 crc kubenswrapper[4921]: I1210 13:01:26.691485 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-55b77bb5f6-bltk9"] Dec 10 13:01:26 crc kubenswrapper[4921]: I1210 13:01:26.702234 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6d4798f878-n7s84"] Dec 10 13:01:26 crc kubenswrapper[4921]: I1210 13:01:26.783919 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/77f48071-becb-4bf7-8dea-391df35c8ace-config\") pod \"route-controller-manager-6d4798f878-n7s84\" (UID: \"77f48071-becb-4bf7-8dea-391df35c8ace\") " pod="openshift-route-controller-manager/route-controller-manager-6d4798f878-n7s84" Dec 10 13:01:26 crc kubenswrapper[4921]: I1210 13:01:26.783999 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5sqlg\" (UniqueName: \"kubernetes.io/projected/2991ea41-8180-4b67-9ebc-c206b737da6d-kube-api-access-5sqlg\") pod \"controller-manager-55b77bb5f6-bltk9\" (UID: \"2991ea41-8180-4b67-9ebc-c206b737da6d\") " pod="openshift-controller-manager/controller-manager-55b77bb5f6-bltk9" Dec 10 13:01:26 crc kubenswrapper[4921]: I1210 13:01:26.784040 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2991ea41-8180-4b67-9ebc-c206b737da6d-config\") pod \"controller-manager-55b77bb5f6-bltk9\" (UID: \"2991ea41-8180-4b67-9ebc-c206b737da6d\") " pod="openshift-controller-manager/controller-manager-55b77bb5f6-bltk9" Dec 10 13:01:26 crc kubenswrapper[4921]: I1210 13:01:26.784067 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/2991ea41-8180-4b67-9ebc-c206b737da6d-proxy-ca-bundles\") pod \"controller-manager-55b77bb5f6-bltk9\" (UID: \"2991ea41-8180-4b67-9ebc-c206b737da6d\") " pod="openshift-controller-manager/controller-manager-55b77bb5f6-bltk9" Dec 10 13:01:26 crc kubenswrapper[4921]: I1210 13:01:26.784101 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/2991ea41-8180-4b67-9ebc-c206b737da6d-client-ca\") pod \"controller-manager-55b77bb5f6-bltk9\" (UID: \"2991ea41-8180-4b67-9ebc-c206b737da6d\") " pod="openshift-controller-manager/controller-manager-55b77bb5f6-bltk9" Dec 10 13:01:26 crc kubenswrapper[4921]: I1210 13:01:26.784128 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/77f48071-becb-4bf7-8dea-391df35c8ace-client-ca\") pod \"route-controller-manager-6d4798f878-n7s84\" (UID: \"77f48071-becb-4bf7-8dea-391df35c8ace\") " pod="openshift-route-controller-manager/route-controller-manager-6d4798f878-n7s84" Dec 10 13:01:26 crc kubenswrapper[4921]: I1210 13:01:26.784157 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/77f48071-becb-4bf7-8dea-391df35c8ace-serving-cert\") pod \"route-controller-manager-6d4798f878-n7s84\" (UID: \"77f48071-becb-4bf7-8dea-391df35c8ace\") " pod="openshift-route-controller-manager/route-controller-manager-6d4798f878-n7s84" Dec 10 13:01:26 crc 
kubenswrapper[4921]: I1210 13:01:26.784176 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7fghr\" (UniqueName: \"kubernetes.io/projected/77f48071-becb-4bf7-8dea-391df35c8ace-kube-api-access-7fghr\") pod \"route-controller-manager-6d4798f878-n7s84\" (UID: \"77f48071-becb-4bf7-8dea-391df35c8ace\") " pod="openshift-route-controller-manager/route-controller-manager-6d4798f878-n7s84" Dec 10 13:01:26 crc kubenswrapper[4921]: I1210 13:01:26.784202 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2991ea41-8180-4b67-9ebc-c206b737da6d-serving-cert\") pod \"controller-manager-55b77bb5f6-bltk9\" (UID: \"2991ea41-8180-4b67-9ebc-c206b737da6d\") " pod="openshift-controller-manager/controller-manager-55b77bb5f6-bltk9" Dec 10 13:01:26 crc kubenswrapper[4921]: I1210 13:01:26.785281 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/77f48071-becb-4bf7-8dea-391df35c8ace-client-ca\") pod \"route-controller-manager-6d4798f878-n7s84\" (UID: \"77f48071-becb-4bf7-8dea-391df35c8ace\") " pod="openshift-route-controller-manager/route-controller-manager-6d4798f878-n7s84" Dec 10 13:01:26 crc kubenswrapper[4921]: I1210 13:01:26.785484 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/77f48071-becb-4bf7-8dea-391df35c8ace-config\") pod \"route-controller-manager-6d4798f878-n7s84\" (UID: \"77f48071-becb-4bf7-8dea-391df35c8ace\") " pod="openshift-route-controller-manager/route-controller-manager-6d4798f878-n7s84" Dec 10 13:01:26 crc kubenswrapper[4921]: I1210 13:01:26.802700 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7fghr\" (UniqueName: \"kubernetes.io/projected/77f48071-becb-4bf7-8dea-391df35c8ace-kube-api-access-7fghr\") pod \"route-controller-manager-6d4798f878-n7s84\" (UID: \"77f48071-becb-4bf7-8dea-391df35c8ace\") " pod="openshift-route-controller-manager/route-controller-manager-6d4798f878-n7s84" Dec 10 13:01:26 crc kubenswrapper[4921]: I1210 13:01:26.803901 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/77f48071-becb-4bf7-8dea-391df35c8ace-serving-cert\") pod \"route-controller-manager-6d4798f878-n7s84\" (UID: \"77f48071-becb-4bf7-8dea-391df35c8ace\") " pod="openshift-route-controller-manager/route-controller-manager-6d4798f878-n7s84" Dec 10 13:01:26 crc kubenswrapper[4921]: I1210 13:01:26.885363 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5sqlg\" (UniqueName: \"kubernetes.io/projected/2991ea41-8180-4b67-9ebc-c206b737da6d-kube-api-access-5sqlg\") pod \"controller-manager-55b77bb5f6-bltk9\" (UID: \"2991ea41-8180-4b67-9ebc-c206b737da6d\") " pod="openshift-controller-manager/controller-manager-55b77bb5f6-bltk9" Dec 10 13:01:26 crc kubenswrapper[4921]: I1210 13:01:26.885457 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2991ea41-8180-4b67-9ebc-c206b737da6d-config\") pod \"controller-manager-55b77bb5f6-bltk9\" (UID: \"2991ea41-8180-4b67-9ebc-c206b737da6d\") " pod="openshift-controller-manager/controller-manager-55b77bb5f6-bltk9" Dec 10 13:01:26 crc kubenswrapper[4921]: I1210 13:01:26.885498 4921 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/2991ea41-8180-4b67-9ebc-c206b737da6d-proxy-ca-bundles\") pod \"controller-manager-55b77bb5f6-bltk9\" (UID: \"2991ea41-8180-4b67-9ebc-c206b737da6d\") " pod="openshift-controller-manager/controller-manager-55b77bb5f6-bltk9" Dec 10 13:01:26 crc kubenswrapper[4921]: I1210 13:01:26.885536 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/2991ea41-8180-4b67-9ebc-c206b737da6d-client-ca\") pod \"controller-manager-55b77bb5f6-bltk9\" (UID: \"2991ea41-8180-4b67-9ebc-c206b737da6d\") " pod="openshift-controller-manager/controller-manager-55b77bb5f6-bltk9" Dec 10 13:01:26 crc kubenswrapper[4921]: I1210 13:01:26.885598 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2991ea41-8180-4b67-9ebc-c206b737da6d-serving-cert\") pod \"controller-manager-55b77bb5f6-bltk9\" (UID: \"2991ea41-8180-4b67-9ebc-c206b737da6d\") " pod="openshift-controller-manager/controller-manager-55b77bb5f6-bltk9" Dec 10 13:01:26 crc kubenswrapper[4921]: I1210 13:01:26.887321 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/2991ea41-8180-4b67-9ebc-c206b737da6d-client-ca\") pod \"controller-manager-55b77bb5f6-bltk9\" (UID: \"2991ea41-8180-4b67-9ebc-c206b737da6d\") " pod="openshift-controller-manager/controller-manager-55b77bb5f6-bltk9" Dec 10 13:01:26 crc kubenswrapper[4921]: I1210 13:01:26.887451 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/2991ea41-8180-4b67-9ebc-c206b737da6d-proxy-ca-bundles\") pod \"controller-manager-55b77bb5f6-bltk9\" (UID: \"2991ea41-8180-4b67-9ebc-c206b737da6d\") " pod="openshift-controller-manager/controller-manager-55b77bb5f6-bltk9" Dec 10 13:01:26 crc kubenswrapper[4921]: I1210 13:01:26.887505 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2991ea41-8180-4b67-9ebc-c206b737da6d-config\") pod \"controller-manager-55b77bb5f6-bltk9\" (UID: \"2991ea41-8180-4b67-9ebc-c206b737da6d\") " pod="openshift-controller-manager/controller-manager-55b77bb5f6-bltk9" Dec 10 13:01:26 crc kubenswrapper[4921]: I1210 13:01:26.890855 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2991ea41-8180-4b67-9ebc-c206b737da6d-serving-cert\") pod \"controller-manager-55b77bb5f6-bltk9\" (UID: \"2991ea41-8180-4b67-9ebc-c206b737da6d\") " pod="openshift-controller-manager/controller-manager-55b77bb5f6-bltk9" Dec 10 13:01:26 crc kubenswrapper[4921]: I1210 13:01:26.912462 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5sqlg\" (UniqueName: \"kubernetes.io/projected/2991ea41-8180-4b67-9ebc-c206b737da6d-kube-api-access-5sqlg\") pod \"controller-manager-55b77bb5f6-bltk9\" (UID: \"2991ea41-8180-4b67-9ebc-c206b737da6d\") " pod="openshift-controller-manager/controller-manager-55b77bb5f6-bltk9" Dec 10 13:01:26 crc kubenswrapper[4921]: I1210 13:01:26.944143 4921 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-mtf22" Dec 10 13:01:26 crc kubenswrapper[4921]: I1210 13:01:26.944472 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-mtf22" event={"ID":"2e786864-e8b1-4a03-9327-14d389a5bc21","Type":"ContainerDied","Data":"38b358b9451d96742aac5a769fa6d5bd3298c083faa8dcc5f43ab00441e87389"} Dec 10 13:01:26 crc kubenswrapper[4921]: I1210 13:01:26.945323 4921 scope.go:117] "RemoveContainer" containerID="9d64276a674c6a2d99dfe9285c7d9e37d416852678303c6c1bd375d233ba3cbd" Dec 10 13:01:26 crc kubenswrapper[4921]: I1210 13:01:26.946300 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-55m5g" event={"ID":"03c59650-42ea-4995-ada6-f86eb8aed1de","Type":"ContainerDied","Data":"042ea5734c535957713ddcb332676dcd6b76c8c1d834b028e928c5e0d8df1928"} Dec 10 13:01:26 crc kubenswrapper[4921]: I1210 13:01:26.946357 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-55m5g" Dec 10 13:01:26 crc kubenswrapper[4921]: I1210 13:01:26.967694 4921 scope.go:117] "RemoveContainer" containerID="5e2fd9f465095a797730b4e59276447b63b2643244e3aff28570797a377679f5" Dec 10 13:01:26 crc kubenswrapper[4921]: I1210 13:01:26.989496 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-55m5g"] Dec 10 13:01:26 crc kubenswrapper[4921]: I1210 13:01:26.996155 4921 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-55m5g"] Dec 10 13:01:27 crc kubenswrapper[4921]: I1210 13:01:27.001339 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-mtf22"] Dec 10 13:01:27 crc kubenswrapper[4921]: I1210 13:01:27.003415 4921 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-mtf22"] Dec 10 13:01:27 crc kubenswrapper[4921]: I1210 13:01:27.003766 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6d4798f878-n7s84" Dec 10 13:01:27 crc kubenswrapper[4921]: I1210 13:01:27.012184 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-55b77bb5f6-bltk9" Dec 10 13:01:27 crc kubenswrapper[4921]: I1210 13:01:27.218371 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="03c59650-42ea-4995-ada6-f86eb8aed1de" path="/var/lib/kubelet/pods/03c59650-42ea-4995-ada6-f86eb8aed1de/volumes" Dec 10 13:01:27 crc kubenswrapper[4921]: I1210 13:01:27.219438 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2e786864-e8b1-4a03-9327-14d389a5bc21" path="/var/lib/kubelet/pods/2e786864-e8b1-4a03-9327-14d389a5bc21/volumes" Dec 10 13:01:27 crc kubenswrapper[4921]: I1210 13:01:27.340448 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-55b77bb5f6-bltk9"] Dec 10 13:01:27 crc kubenswrapper[4921]: W1210 13:01:27.346026 4921 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2991ea41_8180_4b67_9ebc_c206b737da6d.slice/crio-04cd4a6a7eef6d1b0b12489a24a954565793752f3f797ac4e8068ebbddab8eeb WatchSource:0}: Error finding container 04cd4a6a7eef6d1b0b12489a24a954565793752f3f797ac4e8068ebbddab8eeb: Status 404 returned error can't find the container with id 04cd4a6a7eef6d1b0b12489a24a954565793752f3f797ac4e8068ebbddab8eeb Dec 10 13:01:27 crc kubenswrapper[4921]: I1210 13:01:27.480398 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6d4798f878-n7s84"] Dec 10 13:01:27 crc kubenswrapper[4921]: W1210 13:01:27.487447 4921 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod77f48071_becb_4bf7_8dea_391df35c8ace.slice/crio-c3349630f1918434860cce30829e7e3e4e1d1b949465dd5ddb7120eb6c522532 WatchSource:0}: Error finding container c3349630f1918434860cce30829e7e3e4e1d1b949465dd5ddb7120eb6c522532: Status 404 returned error can't find the container with id c3349630f1918434860cce30829e7e3e4e1d1b949465dd5ddb7120eb6c522532 Dec 10 13:01:27 crc kubenswrapper[4921]: I1210 13:01:27.953255 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-55b77bb5f6-bltk9" event={"ID":"2991ea41-8180-4b67-9ebc-c206b737da6d","Type":"ContainerStarted","Data":"1bd8927864cffda2310f259e96fcbaf3a24ef90ec5348b55089e41ca1e816e7f"} Dec 10 13:01:27 crc kubenswrapper[4921]: I1210 13:01:27.953675 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-55b77bb5f6-bltk9" event={"ID":"2991ea41-8180-4b67-9ebc-c206b737da6d","Type":"ContainerStarted","Data":"04cd4a6a7eef6d1b0b12489a24a954565793752f3f797ac4e8068ebbddab8eeb"} Dec 10 13:01:27 crc kubenswrapper[4921]: I1210 13:01:27.954008 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-55b77bb5f6-bltk9" Dec 10 13:01:27 crc kubenswrapper[4921]: I1210 13:01:27.960233 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6d4798f878-n7s84" event={"ID":"77f48071-becb-4bf7-8dea-391df35c8ace","Type":"ContainerStarted","Data":"02e977b1169bad621adecc74a039111beaf6e4bce86b6831c15a08bf8cc37226"} Dec 10 13:01:27 crc kubenswrapper[4921]: I1210 13:01:27.960282 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6d4798f878-n7s84" 
event={"ID":"77f48071-becb-4bf7-8dea-391df35c8ace","Type":"ContainerStarted","Data":"c3349630f1918434860cce30829e7e3e4e1d1b949465dd5ddb7120eb6c522532"} Dec 10 13:01:27 crc kubenswrapper[4921]: I1210 13:01:27.960745 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6d4798f878-n7s84" Dec 10 13:01:27 crc kubenswrapper[4921]: I1210 13:01:27.978508 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-55b77bb5f6-bltk9" Dec 10 13:01:28 crc kubenswrapper[4921]: I1210 13:01:28.014183 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6d4798f878-n7s84" podStartSLOduration=3.014159313 podStartE2EDuration="3.014159313s" podCreationTimestamp="2025-12-10 13:01:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 13:01:28.012527039 +0000 UTC m=+285.228748973" watchObservedRunningTime="2025-12-10 13:01:28.014159313 +0000 UTC m=+285.230381237" Dec 10 13:01:28 crc kubenswrapper[4921]: I1210 13:01:28.014759 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-55b77bb5f6-bltk9" podStartSLOduration=3.014752869 podStartE2EDuration="3.014752869s" podCreationTimestamp="2025-12-10 13:01:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 13:01:27.990755991 +0000 UTC m=+285.206977935" watchObservedRunningTime="2025-12-10 13:01:28.014752869 +0000 UTC m=+285.230974803" Dec 10 13:01:28 crc kubenswrapper[4921]: I1210 13:01:28.385265 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6d4798f878-n7s84" Dec 10 13:01:32 crc kubenswrapper[4921]: I1210 13:01:32.127275 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-5gwtb" Dec 10 13:01:32 crc kubenswrapper[4921]: I1210 13:01:32.182573 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-5gwtb" Dec 10 13:01:47 crc kubenswrapper[4921]: I1210 13:01:47.538638 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6d4798f878-n7s84"] Dec 10 13:01:47 crc kubenswrapper[4921]: I1210 13:01:47.539406 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6d4798f878-n7s84" podUID="77f48071-becb-4bf7-8dea-391df35c8ace" containerName="route-controller-manager" containerID="cri-o://02e977b1169bad621adecc74a039111beaf6e4bce86b6831c15a08bf8cc37226" gracePeriod=30 Dec 10 13:01:47 crc kubenswrapper[4921]: I1210 13:01:47.919823 4921 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6d4798f878-n7s84" Dec 10 13:01:47 crc kubenswrapper[4921]: I1210 13:01:47.962924 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/77f48071-becb-4bf7-8dea-391df35c8ace-serving-cert\") pod \"77f48071-becb-4bf7-8dea-391df35c8ace\" (UID: \"77f48071-becb-4bf7-8dea-391df35c8ace\") " Dec 10 13:01:47 crc kubenswrapper[4921]: I1210 13:01:47.963269 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/77f48071-becb-4bf7-8dea-391df35c8ace-config\") pod \"77f48071-becb-4bf7-8dea-391df35c8ace\" (UID: \"77f48071-becb-4bf7-8dea-391df35c8ace\") " Dec 10 13:01:47 crc kubenswrapper[4921]: I1210 13:01:47.963340 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/77f48071-becb-4bf7-8dea-391df35c8ace-client-ca\") pod \"77f48071-becb-4bf7-8dea-391df35c8ace\" (UID: \"77f48071-becb-4bf7-8dea-391df35c8ace\") " Dec 10 13:01:47 crc kubenswrapper[4921]: I1210 13:01:47.963367 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7fghr\" (UniqueName: \"kubernetes.io/projected/77f48071-becb-4bf7-8dea-391df35c8ace-kube-api-access-7fghr\") pod \"77f48071-becb-4bf7-8dea-391df35c8ace\" (UID: \"77f48071-becb-4bf7-8dea-391df35c8ace\") " Dec 10 13:01:47 crc kubenswrapper[4921]: I1210 13:01:47.964030 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/77f48071-becb-4bf7-8dea-391df35c8ace-client-ca" (OuterVolumeSpecName: "client-ca") pod "77f48071-becb-4bf7-8dea-391df35c8ace" (UID: "77f48071-becb-4bf7-8dea-391df35c8ace"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 13:01:47 crc kubenswrapper[4921]: I1210 13:01:47.964041 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/77f48071-becb-4bf7-8dea-391df35c8ace-config" (OuterVolumeSpecName: "config") pod "77f48071-becb-4bf7-8dea-391df35c8ace" (UID: "77f48071-becb-4bf7-8dea-391df35c8ace"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 13:01:47 crc kubenswrapper[4921]: I1210 13:01:47.972478 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/77f48071-becb-4bf7-8dea-391df35c8ace-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "77f48071-becb-4bf7-8dea-391df35c8ace" (UID: "77f48071-becb-4bf7-8dea-391df35c8ace"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:01:47 crc kubenswrapper[4921]: I1210 13:01:47.975526 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/77f48071-becb-4bf7-8dea-391df35c8ace-kube-api-access-7fghr" (OuterVolumeSpecName: "kube-api-access-7fghr") pod "77f48071-becb-4bf7-8dea-391df35c8ace" (UID: "77f48071-becb-4bf7-8dea-391df35c8ace"). InnerVolumeSpecName "kube-api-access-7fghr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 13:01:48 crc kubenswrapper[4921]: I1210 13:01:48.053927 4921 generic.go:334] "Generic (PLEG): container finished" podID="77f48071-becb-4bf7-8dea-391df35c8ace" containerID="02e977b1169bad621adecc74a039111beaf6e4bce86b6831c15a08bf8cc37226" exitCode=0 Dec 10 13:01:48 crc kubenswrapper[4921]: I1210 13:01:48.053984 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6d4798f878-n7s84" event={"ID":"77f48071-becb-4bf7-8dea-391df35c8ace","Type":"ContainerDied","Data":"02e977b1169bad621adecc74a039111beaf6e4bce86b6831c15a08bf8cc37226"} Dec 10 13:01:48 crc kubenswrapper[4921]: I1210 13:01:48.054038 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6d4798f878-n7s84" event={"ID":"77f48071-becb-4bf7-8dea-391df35c8ace","Type":"ContainerDied","Data":"c3349630f1918434860cce30829e7e3e4e1d1b949465dd5ddb7120eb6c522532"} Dec 10 13:01:48 crc kubenswrapper[4921]: I1210 13:01:48.054058 4921 scope.go:117] "RemoveContainer" containerID="02e977b1169bad621adecc74a039111beaf6e4bce86b6831c15a08bf8cc37226" Dec 10 13:01:48 crc kubenswrapper[4921]: I1210 13:01:48.054056 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6d4798f878-n7s84" Dec 10 13:01:48 crc kubenswrapper[4921]: I1210 13:01:48.064071 4921 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/77f48071-becb-4bf7-8dea-391df35c8ace-client-ca\") on node \"crc\" DevicePath \"\"" Dec 10 13:01:48 crc kubenswrapper[4921]: I1210 13:01:48.064463 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7fghr\" (UniqueName: \"kubernetes.io/projected/77f48071-becb-4bf7-8dea-391df35c8ace-kube-api-access-7fghr\") on node \"crc\" DevicePath \"\"" Dec 10 13:01:48 crc kubenswrapper[4921]: I1210 13:01:48.064474 4921 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/77f48071-becb-4bf7-8dea-391df35c8ace-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 10 13:01:48 crc kubenswrapper[4921]: I1210 13:01:48.064483 4921 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/77f48071-becb-4bf7-8dea-391df35c8ace-config\") on node \"crc\" DevicePath \"\"" Dec 10 13:01:48 crc kubenswrapper[4921]: I1210 13:01:48.074907 4921 scope.go:117] "RemoveContainer" containerID="02e977b1169bad621adecc74a039111beaf6e4bce86b6831c15a08bf8cc37226" Dec 10 13:01:48 crc kubenswrapper[4921]: E1210 13:01:48.075532 4921 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"02e977b1169bad621adecc74a039111beaf6e4bce86b6831c15a08bf8cc37226\": container with ID starting with 02e977b1169bad621adecc74a039111beaf6e4bce86b6831c15a08bf8cc37226 not found: ID does not exist" containerID="02e977b1169bad621adecc74a039111beaf6e4bce86b6831c15a08bf8cc37226" Dec 10 13:01:48 crc kubenswrapper[4921]: I1210 13:01:48.075576 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"02e977b1169bad621adecc74a039111beaf6e4bce86b6831c15a08bf8cc37226"} err="failed to get container status \"02e977b1169bad621adecc74a039111beaf6e4bce86b6831c15a08bf8cc37226\": rpc error: code = NotFound desc = could not find container 
\"02e977b1169bad621adecc74a039111beaf6e4bce86b6831c15a08bf8cc37226\": container with ID starting with 02e977b1169bad621adecc74a039111beaf6e4bce86b6831c15a08bf8cc37226 not found: ID does not exist" Dec 10 13:01:48 crc kubenswrapper[4921]: I1210 13:01:48.087347 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6d4798f878-n7s84"] Dec 10 13:01:48 crc kubenswrapper[4921]: I1210 13:01:48.090539 4921 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6d4798f878-n7s84"] Dec 10 13:01:48 crc kubenswrapper[4921]: I1210 13:01:48.694628 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6fbd899f86-phmkr"] Dec 10 13:01:48 crc kubenswrapper[4921]: E1210 13:01:48.694917 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="77f48071-becb-4bf7-8dea-391df35c8ace" containerName="route-controller-manager" Dec 10 13:01:48 crc kubenswrapper[4921]: I1210 13:01:48.694937 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="77f48071-becb-4bf7-8dea-391df35c8ace" containerName="route-controller-manager" Dec 10 13:01:48 crc kubenswrapper[4921]: I1210 13:01:48.695109 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="77f48071-becb-4bf7-8dea-391df35c8ace" containerName="route-controller-manager" Dec 10 13:01:48 crc kubenswrapper[4921]: I1210 13:01:48.695653 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6fbd899f86-phmkr" Dec 10 13:01:48 crc kubenswrapper[4921]: I1210 13:01:48.697418 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 10 13:01:48 crc kubenswrapper[4921]: I1210 13:01:48.697977 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 10 13:01:48 crc kubenswrapper[4921]: I1210 13:01:48.698241 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 10 13:01:48 crc kubenswrapper[4921]: I1210 13:01:48.698475 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 10 13:01:48 crc kubenswrapper[4921]: I1210 13:01:48.699927 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 10 13:01:48 crc kubenswrapper[4921]: I1210 13:01:48.704330 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 10 13:01:48 crc kubenswrapper[4921]: I1210 13:01:48.705519 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6fbd899f86-phmkr"] Dec 10 13:01:48 crc kubenswrapper[4921]: I1210 13:01:48.771347 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c3b99977-bed4-4b59-874a-10add7157f07-config\") pod \"route-controller-manager-6fbd899f86-phmkr\" (UID: \"c3b99977-bed4-4b59-874a-10add7157f07\") " pod="openshift-route-controller-manager/route-controller-manager-6fbd899f86-phmkr" Dec 10 13:01:48 crc kubenswrapper[4921]: I1210 13:01:48.771477 4921 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c3b99977-bed4-4b59-874a-10add7157f07-serving-cert\") pod \"route-controller-manager-6fbd899f86-phmkr\" (UID: \"c3b99977-bed4-4b59-874a-10add7157f07\") " pod="openshift-route-controller-manager/route-controller-manager-6fbd899f86-phmkr" Dec 10 13:01:48 crc kubenswrapper[4921]: I1210 13:01:48.771547 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c3b99977-bed4-4b59-874a-10add7157f07-client-ca\") pod \"route-controller-manager-6fbd899f86-phmkr\" (UID: \"c3b99977-bed4-4b59-874a-10add7157f07\") " pod="openshift-route-controller-manager/route-controller-manager-6fbd899f86-phmkr" Dec 10 13:01:48 crc kubenswrapper[4921]: I1210 13:01:48.771623 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pn7h6\" (UniqueName: \"kubernetes.io/projected/c3b99977-bed4-4b59-874a-10add7157f07-kube-api-access-pn7h6\") pod \"route-controller-manager-6fbd899f86-phmkr\" (UID: \"c3b99977-bed4-4b59-874a-10add7157f07\") " pod="openshift-route-controller-manager/route-controller-manager-6fbd899f86-phmkr" Dec 10 13:01:48 crc kubenswrapper[4921]: I1210 13:01:48.872353 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c3b99977-bed4-4b59-874a-10add7157f07-client-ca\") pod \"route-controller-manager-6fbd899f86-phmkr\" (UID: \"c3b99977-bed4-4b59-874a-10add7157f07\") " pod="openshift-route-controller-manager/route-controller-manager-6fbd899f86-phmkr" Dec 10 13:01:48 crc kubenswrapper[4921]: I1210 13:01:48.872461 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pn7h6\" (UniqueName: \"kubernetes.io/projected/c3b99977-bed4-4b59-874a-10add7157f07-kube-api-access-pn7h6\") pod \"route-controller-manager-6fbd899f86-phmkr\" (UID: \"c3b99977-bed4-4b59-874a-10add7157f07\") " pod="openshift-route-controller-manager/route-controller-manager-6fbd899f86-phmkr" Dec 10 13:01:48 crc kubenswrapper[4921]: I1210 13:01:48.872887 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c3b99977-bed4-4b59-874a-10add7157f07-config\") pod \"route-controller-manager-6fbd899f86-phmkr\" (UID: \"c3b99977-bed4-4b59-874a-10add7157f07\") " pod="openshift-route-controller-manager/route-controller-manager-6fbd899f86-phmkr" Dec 10 13:01:48 crc kubenswrapper[4921]: I1210 13:01:48.873453 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c3b99977-bed4-4b59-874a-10add7157f07-client-ca\") pod \"route-controller-manager-6fbd899f86-phmkr\" (UID: \"c3b99977-bed4-4b59-874a-10add7157f07\") " pod="openshift-route-controller-manager/route-controller-manager-6fbd899f86-phmkr" Dec 10 13:01:48 crc kubenswrapper[4921]: I1210 13:01:48.873926 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c3b99977-bed4-4b59-874a-10add7157f07-config\") pod \"route-controller-manager-6fbd899f86-phmkr\" (UID: \"c3b99977-bed4-4b59-874a-10add7157f07\") " pod="openshift-route-controller-manager/route-controller-manager-6fbd899f86-phmkr" Dec 10 13:01:48 crc kubenswrapper[4921]: I1210 13:01:48.873998 4921 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c3b99977-bed4-4b59-874a-10add7157f07-serving-cert\") pod \"route-controller-manager-6fbd899f86-phmkr\" (UID: \"c3b99977-bed4-4b59-874a-10add7157f07\") " pod="openshift-route-controller-manager/route-controller-manager-6fbd899f86-phmkr" Dec 10 13:01:48 crc kubenswrapper[4921]: I1210 13:01:48.889515 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c3b99977-bed4-4b59-874a-10add7157f07-serving-cert\") pod \"route-controller-manager-6fbd899f86-phmkr\" (UID: \"c3b99977-bed4-4b59-874a-10add7157f07\") " pod="openshift-route-controller-manager/route-controller-manager-6fbd899f86-phmkr" Dec 10 13:01:48 crc kubenswrapper[4921]: I1210 13:01:48.891953 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pn7h6\" (UniqueName: \"kubernetes.io/projected/c3b99977-bed4-4b59-874a-10add7157f07-kube-api-access-pn7h6\") pod \"route-controller-manager-6fbd899f86-phmkr\" (UID: \"c3b99977-bed4-4b59-874a-10add7157f07\") " pod="openshift-route-controller-manager/route-controller-manager-6fbd899f86-phmkr" Dec 10 13:01:49 crc kubenswrapper[4921]: I1210 13:01:49.010740 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6fbd899f86-phmkr" Dec 10 13:01:49 crc kubenswrapper[4921]: I1210 13:01:49.198307 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="77f48071-becb-4bf7-8dea-391df35c8ace" path="/var/lib/kubelet/pods/77f48071-becb-4bf7-8dea-391df35c8ace/volumes" Dec 10 13:01:49 crc kubenswrapper[4921]: I1210 13:01:49.390827 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6fbd899f86-phmkr"] Dec 10 13:01:50 crc kubenswrapper[4921]: I1210 13:01:50.074701 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6fbd899f86-phmkr" event={"ID":"c3b99977-bed4-4b59-874a-10add7157f07","Type":"ContainerStarted","Data":"e80c0ccb0242e07fa0d39a7bfd7ba54ca793f94f02ef56582d2420b2f46ba616"} Dec 10 13:01:50 crc kubenswrapper[4921]: I1210 13:01:50.074760 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6fbd899f86-phmkr" event={"ID":"c3b99977-bed4-4b59-874a-10add7157f07","Type":"ContainerStarted","Data":"eff7c3d7fec2e7836c438a2f52223b5c2d086abf09408c818f81f958f3358932"} Dec 10 13:01:50 crc kubenswrapper[4921]: I1210 13:01:50.075207 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6fbd899f86-phmkr" Dec 10 13:01:50 crc kubenswrapper[4921]: I1210 13:01:50.080996 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6fbd899f86-phmkr" Dec 10 13:01:50 crc kubenswrapper[4921]: I1210 13:01:50.121077 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6fbd899f86-phmkr" podStartSLOduration=3.121051015 podStartE2EDuration="3.121051015s" podCreationTimestamp="2025-12-10 13:01:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 13:01:50.097485618 +0000 UTC m=+307.313707642" watchObservedRunningTime="2025-12-10 
13:01:50.121051015 +0000 UTC m=+307.337272939" Dec 10 13:02:02 crc kubenswrapper[4921]: I1210 13:02:02.337472 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-7wmbk"] Dec 10 13:02:02 crc kubenswrapper[4921]: I1210 13:02:02.338673 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-7wmbk" Dec 10 13:02:02 crc kubenswrapper[4921]: I1210 13:02:02.359205 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-7wmbk"] Dec 10 13:02:02 crc kubenswrapper[4921]: I1210 13:02:02.536269 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/ba21bffb-af4b-44fa-98e3-24c9ea9a0a46-bound-sa-token\") pod \"image-registry-66df7c8f76-7wmbk\" (UID: \"ba21bffb-af4b-44fa-98e3-24c9ea9a0a46\") " pod="openshift-image-registry/image-registry-66df7c8f76-7wmbk" Dec 10 13:02:02 crc kubenswrapper[4921]: I1210 13:02:02.536354 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-7wmbk\" (UID: \"ba21bffb-af4b-44fa-98e3-24c9ea9a0a46\") " pod="openshift-image-registry/image-registry-66df7c8f76-7wmbk" Dec 10 13:02:02 crc kubenswrapper[4921]: I1210 13:02:02.536403 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xggc4\" (UniqueName: \"kubernetes.io/projected/ba21bffb-af4b-44fa-98e3-24c9ea9a0a46-kube-api-access-xggc4\") pod \"image-registry-66df7c8f76-7wmbk\" (UID: \"ba21bffb-af4b-44fa-98e3-24c9ea9a0a46\") " pod="openshift-image-registry/image-registry-66df7c8f76-7wmbk" Dec 10 13:02:02 crc kubenswrapper[4921]: I1210 13:02:02.536425 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/ba21bffb-af4b-44fa-98e3-24c9ea9a0a46-ca-trust-extracted\") pod \"image-registry-66df7c8f76-7wmbk\" (UID: \"ba21bffb-af4b-44fa-98e3-24c9ea9a0a46\") " pod="openshift-image-registry/image-registry-66df7c8f76-7wmbk" Dec 10 13:02:02 crc kubenswrapper[4921]: I1210 13:02:02.536442 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/ba21bffb-af4b-44fa-98e3-24c9ea9a0a46-installation-pull-secrets\") pod \"image-registry-66df7c8f76-7wmbk\" (UID: \"ba21bffb-af4b-44fa-98e3-24c9ea9a0a46\") " pod="openshift-image-registry/image-registry-66df7c8f76-7wmbk" Dec 10 13:02:02 crc kubenswrapper[4921]: I1210 13:02:02.536473 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ba21bffb-af4b-44fa-98e3-24c9ea9a0a46-trusted-ca\") pod \"image-registry-66df7c8f76-7wmbk\" (UID: \"ba21bffb-af4b-44fa-98e3-24c9ea9a0a46\") " pod="openshift-image-registry/image-registry-66df7c8f76-7wmbk" Dec 10 13:02:02 crc kubenswrapper[4921]: I1210 13:02:02.536491 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/ba21bffb-af4b-44fa-98e3-24c9ea9a0a46-registry-certificates\") pod 
\"image-registry-66df7c8f76-7wmbk\" (UID: \"ba21bffb-af4b-44fa-98e3-24c9ea9a0a46\") " pod="openshift-image-registry/image-registry-66df7c8f76-7wmbk" Dec 10 13:02:02 crc kubenswrapper[4921]: I1210 13:02:02.536507 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/ba21bffb-af4b-44fa-98e3-24c9ea9a0a46-registry-tls\") pod \"image-registry-66df7c8f76-7wmbk\" (UID: \"ba21bffb-af4b-44fa-98e3-24c9ea9a0a46\") " pod="openshift-image-registry/image-registry-66df7c8f76-7wmbk" Dec 10 13:02:02 crc kubenswrapper[4921]: I1210 13:02:02.560167 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-7wmbk\" (UID: \"ba21bffb-af4b-44fa-98e3-24c9ea9a0a46\") " pod="openshift-image-registry/image-registry-66df7c8f76-7wmbk" Dec 10 13:02:02 crc kubenswrapper[4921]: I1210 13:02:02.637358 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ba21bffb-af4b-44fa-98e3-24c9ea9a0a46-trusted-ca\") pod \"image-registry-66df7c8f76-7wmbk\" (UID: \"ba21bffb-af4b-44fa-98e3-24c9ea9a0a46\") " pod="openshift-image-registry/image-registry-66df7c8f76-7wmbk" Dec 10 13:02:02 crc kubenswrapper[4921]: I1210 13:02:02.637413 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/ba21bffb-af4b-44fa-98e3-24c9ea9a0a46-registry-certificates\") pod \"image-registry-66df7c8f76-7wmbk\" (UID: \"ba21bffb-af4b-44fa-98e3-24c9ea9a0a46\") " pod="openshift-image-registry/image-registry-66df7c8f76-7wmbk" Dec 10 13:02:02 crc kubenswrapper[4921]: I1210 13:02:02.637432 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/ba21bffb-af4b-44fa-98e3-24c9ea9a0a46-registry-tls\") pod \"image-registry-66df7c8f76-7wmbk\" (UID: \"ba21bffb-af4b-44fa-98e3-24c9ea9a0a46\") " pod="openshift-image-registry/image-registry-66df7c8f76-7wmbk" Dec 10 13:02:02 crc kubenswrapper[4921]: I1210 13:02:02.637475 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/ba21bffb-af4b-44fa-98e3-24c9ea9a0a46-bound-sa-token\") pod \"image-registry-66df7c8f76-7wmbk\" (UID: \"ba21bffb-af4b-44fa-98e3-24c9ea9a0a46\") " pod="openshift-image-registry/image-registry-66df7c8f76-7wmbk" Dec 10 13:02:02 crc kubenswrapper[4921]: I1210 13:02:02.637505 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xggc4\" (UniqueName: \"kubernetes.io/projected/ba21bffb-af4b-44fa-98e3-24c9ea9a0a46-kube-api-access-xggc4\") pod \"image-registry-66df7c8f76-7wmbk\" (UID: \"ba21bffb-af4b-44fa-98e3-24c9ea9a0a46\") " pod="openshift-image-registry/image-registry-66df7c8f76-7wmbk" Dec 10 13:02:02 crc kubenswrapper[4921]: I1210 13:02:02.637525 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/ba21bffb-af4b-44fa-98e3-24c9ea9a0a46-ca-trust-extracted\") pod \"image-registry-66df7c8f76-7wmbk\" (UID: \"ba21bffb-af4b-44fa-98e3-24c9ea9a0a46\") " pod="openshift-image-registry/image-registry-66df7c8f76-7wmbk" Dec 10 13:02:02 crc kubenswrapper[4921]: I1210 
13:02:02.637541 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/ba21bffb-af4b-44fa-98e3-24c9ea9a0a46-installation-pull-secrets\") pod \"image-registry-66df7c8f76-7wmbk\" (UID: \"ba21bffb-af4b-44fa-98e3-24c9ea9a0a46\") " pod="openshift-image-registry/image-registry-66df7c8f76-7wmbk" Dec 10 13:02:02 crc kubenswrapper[4921]: I1210 13:02:02.638221 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/ba21bffb-af4b-44fa-98e3-24c9ea9a0a46-ca-trust-extracted\") pod \"image-registry-66df7c8f76-7wmbk\" (UID: \"ba21bffb-af4b-44fa-98e3-24c9ea9a0a46\") " pod="openshift-image-registry/image-registry-66df7c8f76-7wmbk" Dec 10 13:02:02 crc kubenswrapper[4921]: I1210 13:02:02.638766 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ba21bffb-af4b-44fa-98e3-24c9ea9a0a46-trusted-ca\") pod \"image-registry-66df7c8f76-7wmbk\" (UID: \"ba21bffb-af4b-44fa-98e3-24c9ea9a0a46\") " pod="openshift-image-registry/image-registry-66df7c8f76-7wmbk" Dec 10 13:02:02 crc kubenswrapper[4921]: I1210 13:02:02.638909 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/ba21bffb-af4b-44fa-98e3-24c9ea9a0a46-registry-certificates\") pod \"image-registry-66df7c8f76-7wmbk\" (UID: \"ba21bffb-af4b-44fa-98e3-24c9ea9a0a46\") " pod="openshift-image-registry/image-registry-66df7c8f76-7wmbk" Dec 10 13:02:02 crc kubenswrapper[4921]: I1210 13:02:02.644461 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/ba21bffb-af4b-44fa-98e3-24c9ea9a0a46-registry-tls\") pod \"image-registry-66df7c8f76-7wmbk\" (UID: \"ba21bffb-af4b-44fa-98e3-24c9ea9a0a46\") " pod="openshift-image-registry/image-registry-66df7c8f76-7wmbk" Dec 10 13:02:02 crc kubenswrapper[4921]: I1210 13:02:02.645738 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/ba21bffb-af4b-44fa-98e3-24c9ea9a0a46-installation-pull-secrets\") pod \"image-registry-66df7c8f76-7wmbk\" (UID: \"ba21bffb-af4b-44fa-98e3-24c9ea9a0a46\") " pod="openshift-image-registry/image-registry-66df7c8f76-7wmbk" Dec 10 13:02:02 crc kubenswrapper[4921]: I1210 13:02:02.653331 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xggc4\" (UniqueName: \"kubernetes.io/projected/ba21bffb-af4b-44fa-98e3-24c9ea9a0a46-kube-api-access-xggc4\") pod \"image-registry-66df7c8f76-7wmbk\" (UID: \"ba21bffb-af4b-44fa-98e3-24c9ea9a0a46\") " pod="openshift-image-registry/image-registry-66df7c8f76-7wmbk" Dec 10 13:02:02 crc kubenswrapper[4921]: I1210 13:02:02.654480 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/ba21bffb-af4b-44fa-98e3-24c9ea9a0a46-bound-sa-token\") pod \"image-registry-66df7c8f76-7wmbk\" (UID: \"ba21bffb-af4b-44fa-98e3-24c9ea9a0a46\") " pod="openshift-image-registry/image-registry-66df7c8f76-7wmbk" Dec 10 13:02:02 crc kubenswrapper[4921]: I1210 13:02:02.656464 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-7wmbk" Dec 10 13:02:03 crc kubenswrapper[4921]: I1210 13:02:03.060515 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-7wmbk"] Dec 10 13:02:03 crc kubenswrapper[4921]: I1210 13:02:03.160541 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-7wmbk" event={"ID":"ba21bffb-af4b-44fa-98e3-24c9ea9a0a46","Type":"ContainerStarted","Data":"a200829c671065e416eea914016571207af45ca4f241f02dd8509c0799daad82"} Dec 10 13:02:04 crc kubenswrapper[4921]: I1210 13:02:04.167489 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-7wmbk" event={"ID":"ba21bffb-af4b-44fa-98e3-24c9ea9a0a46","Type":"ContainerStarted","Data":"44f91a83554a3871ece4ea9416b3cd7212a776a382d7f1000161e1be5e1558aa"} Dec 10 13:02:04 crc kubenswrapper[4921]: I1210 13:02:04.167851 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-7wmbk" Dec 10 13:02:04 crc kubenswrapper[4921]: I1210 13:02:04.191925 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-7wmbk" podStartSLOduration=2.19190516 podStartE2EDuration="2.19190516s" podCreationTimestamp="2025-12-10 13:02:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 13:02:04.188846207 +0000 UTC m=+321.405068141" watchObservedRunningTime="2025-12-10 13:02:04.19190516 +0000 UTC m=+321.408127094" Dec 10 13:02:07 crc kubenswrapper[4921]: I1210 13:02:07.538550 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-55b77bb5f6-bltk9"] Dec 10 13:02:07 crc kubenswrapper[4921]: I1210 13:02:07.539233 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-55b77bb5f6-bltk9" podUID="2991ea41-8180-4b67-9ebc-c206b737da6d" containerName="controller-manager" containerID="cri-o://1bd8927864cffda2310f259e96fcbaf3a24ef90ec5348b55089e41ca1e816e7f" gracePeriod=30 Dec 10 13:02:07 crc kubenswrapper[4921]: I1210 13:02:07.910380 4921 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-55b77bb5f6-bltk9" Dec 10 13:02:08 crc kubenswrapper[4921]: I1210 13:02:08.028619 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2991ea41-8180-4b67-9ebc-c206b737da6d-serving-cert\") pod \"2991ea41-8180-4b67-9ebc-c206b737da6d\" (UID: \"2991ea41-8180-4b67-9ebc-c206b737da6d\") " Dec 10 13:02:08 crc kubenswrapper[4921]: I1210 13:02:08.028703 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5sqlg\" (UniqueName: \"kubernetes.io/projected/2991ea41-8180-4b67-9ebc-c206b737da6d-kube-api-access-5sqlg\") pod \"2991ea41-8180-4b67-9ebc-c206b737da6d\" (UID: \"2991ea41-8180-4b67-9ebc-c206b737da6d\") " Dec 10 13:02:08 crc kubenswrapper[4921]: I1210 13:02:08.028732 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/2991ea41-8180-4b67-9ebc-c206b737da6d-client-ca\") pod \"2991ea41-8180-4b67-9ebc-c206b737da6d\" (UID: \"2991ea41-8180-4b67-9ebc-c206b737da6d\") " Dec 10 13:02:08 crc kubenswrapper[4921]: I1210 13:02:08.028761 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/2991ea41-8180-4b67-9ebc-c206b737da6d-proxy-ca-bundles\") pod \"2991ea41-8180-4b67-9ebc-c206b737da6d\" (UID: \"2991ea41-8180-4b67-9ebc-c206b737da6d\") " Dec 10 13:02:08 crc kubenswrapper[4921]: I1210 13:02:08.028826 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2991ea41-8180-4b67-9ebc-c206b737da6d-config\") pod \"2991ea41-8180-4b67-9ebc-c206b737da6d\" (UID: \"2991ea41-8180-4b67-9ebc-c206b737da6d\") " Dec 10 13:02:08 crc kubenswrapper[4921]: I1210 13:02:08.029824 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2991ea41-8180-4b67-9ebc-c206b737da6d-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "2991ea41-8180-4b67-9ebc-c206b737da6d" (UID: "2991ea41-8180-4b67-9ebc-c206b737da6d"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 13:02:08 crc kubenswrapper[4921]: I1210 13:02:08.029850 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2991ea41-8180-4b67-9ebc-c206b737da6d-client-ca" (OuterVolumeSpecName: "client-ca") pod "2991ea41-8180-4b67-9ebc-c206b737da6d" (UID: "2991ea41-8180-4b67-9ebc-c206b737da6d"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 13:02:08 crc kubenswrapper[4921]: I1210 13:02:08.029902 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2991ea41-8180-4b67-9ebc-c206b737da6d-config" (OuterVolumeSpecName: "config") pod "2991ea41-8180-4b67-9ebc-c206b737da6d" (UID: "2991ea41-8180-4b67-9ebc-c206b737da6d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 13:02:08 crc kubenswrapper[4921]: I1210 13:02:08.035373 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2991ea41-8180-4b67-9ebc-c206b737da6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "2991ea41-8180-4b67-9ebc-c206b737da6d" (UID: "2991ea41-8180-4b67-9ebc-c206b737da6d"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:02:08 crc kubenswrapper[4921]: I1210 13:02:08.035373 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2991ea41-8180-4b67-9ebc-c206b737da6d-kube-api-access-5sqlg" (OuterVolumeSpecName: "kube-api-access-5sqlg") pod "2991ea41-8180-4b67-9ebc-c206b737da6d" (UID: "2991ea41-8180-4b67-9ebc-c206b737da6d"). InnerVolumeSpecName "kube-api-access-5sqlg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 13:02:08 crc kubenswrapper[4921]: I1210 13:02:08.129886 4921 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2991ea41-8180-4b67-9ebc-c206b737da6d-config\") on node \"crc\" DevicePath \"\"" Dec 10 13:02:08 crc kubenswrapper[4921]: I1210 13:02:08.129924 4921 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2991ea41-8180-4b67-9ebc-c206b737da6d-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 10 13:02:08 crc kubenswrapper[4921]: I1210 13:02:08.129952 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5sqlg\" (UniqueName: \"kubernetes.io/projected/2991ea41-8180-4b67-9ebc-c206b737da6d-kube-api-access-5sqlg\") on node \"crc\" DevicePath \"\"" Dec 10 13:02:08 crc kubenswrapper[4921]: I1210 13:02:08.129965 4921 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/2991ea41-8180-4b67-9ebc-c206b737da6d-client-ca\") on node \"crc\" DevicePath \"\"" Dec 10 13:02:08 crc kubenswrapper[4921]: I1210 13:02:08.129976 4921 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/2991ea41-8180-4b67-9ebc-c206b737da6d-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 10 13:02:08 crc kubenswrapper[4921]: I1210 13:02:08.191639 4921 generic.go:334] "Generic (PLEG): container finished" podID="2991ea41-8180-4b67-9ebc-c206b737da6d" containerID="1bd8927864cffda2310f259e96fcbaf3a24ef90ec5348b55089e41ca1e816e7f" exitCode=0 Dec 10 13:02:08 crc kubenswrapper[4921]: I1210 13:02:08.191653 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-55b77bb5f6-bltk9" event={"ID":"2991ea41-8180-4b67-9ebc-c206b737da6d","Type":"ContainerDied","Data":"1bd8927864cffda2310f259e96fcbaf3a24ef90ec5348b55089e41ca1e816e7f"} Dec 10 13:02:08 crc kubenswrapper[4921]: I1210 13:02:08.191703 4921 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-55b77bb5f6-bltk9" Dec 10 13:02:08 crc kubenswrapper[4921]: I1210 13:02:08.191704 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-55b77bb5f6-bltk9" event={"ID":"2991ea41-8180-4b67-9ebc-c206b737da6d","Type":"ContainerDied","Data":"04cd4a6a7eef6d1b0b12489a24a954565793752f3f797ac4e8068ebbddab8eeb"} Dec 10 13:02:08 crc kubenswrapper[4921]: I1210 13:02:08.191723 4921 scope.go:117] "RemoveContainer" containerID="1bd8927864cffda2310f259e96fcbaf3a24ef90ec5348b55089e41ca1e816e7f" Dec 10 13:02:08 crc kubenswrapper[4921]: I1210 13:02:08.210186 4921 scope.go:117] "RemoveContainer" containerID="1bd8927864cffda2310f259e96fcbaf3a24ef90ec5348b55089e41ca1e816e7f" Dec 10 13:02:08 crc kubenswrapper[4921]: E1210 13:02:08.210624 4921 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1bd8927864cffda2310f259e96fcbaf3a24ef90ec5348b55089e41ca1e816e7f\": container with ID starting with 1bd8927864cffda2310f259e96fcbaf3a24ef90ec5348b55089e41ca1e816e7f not found: ID does not exist" containerID="1bd8927864cffda2310f259e96fcbaf3a24ef90ec5348b55089e41ca1e816e7f" Dec 10 13:02:08 crc kubenswrapper[4921]: I1210 13:02:08.210651 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1bd8927864cffda2310f259e96fcbaf3a24ef90ec5348b55089e41ca1e816e7f"} err="failed to get container status \"1bd8927864cffda2310f259e96fcbaf3a24ef90ec5348b55089e41ca1e816e7f\": rpc error: code = NotFound desc = could not find container \"1bd8927864cffda2310f259e96fcbaf3a24ef90ec5348b55089e41ca1e816e7f\": container with ID starting with 1bd8927864cffda2310f259e96fcbaf3a24ef90ec5348b55089e41ca1e816e7f not found: ID does not exist" Dec 10 13:02:08 crc kubenswrapper[4921]: I1210 13:02:08.223207 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-55b77bb5f6-bltk9"] Dec 10 13:02:08 crc kubenswrapper[4921]: I1210 13:02:08.228694 4921 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-55b77bb5f6-bltk9"] Dec 10 13:02:08 crc kubenswrapper[4921]: I1210 13:02:08.709409 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-6bc9989545-j7zzt"] Dec 10 13:02:08 crc kubenswrapper[4921]: E1210 13:02:08.709693 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2991ea41-8180-4b67-9ebc-c206b737da6d" containerName="controller-manager" Dec 10 13:02:08 crc kubenswrapper[4921]: I1210 13:02:08.709713 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="2991ea41-8180-4b67-9ebc-c206b737da6d" containerName="controller-manager" Dec 10 13:02:08 crc kubenswrapper[4921]: I1210 13:02:08.709885 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="2991ea41-8180-4b67-9ebc-c206b737da6d" containerName="controller-manager" Dec 10 13:02:08 crc kubenswrapper[4921]: I1210 13:02:08.710436 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-6bc9989545-j7zzt" Dec 10 13:02:08 crc kubenswrapper[4921]: I1210 13:02:08.715118 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 10 13:02:08 crc kubenswrapper[4921]: I1210 13:02:08.715261 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 10 13:02:08 crc kubenswrapper[4921]: I1210 13:02:08.715309 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 10 13:02:08 crc kubenswrapper[4921]: I1210 13:02:08.715752 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 10 13:02:08 crc kubenswrapper[4921]: I1210 13:02:08.716450 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 10 13:02:08 crc kubenswrapper[4921]: I1210 13:02:08.717906 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 10 13:02:08 crc kubenswrapper[4921]: I1210 13:02:08.725257 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 10 13:02:08 crc kubenswrapper[4921]: I1210 13:02:08.743161 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-6bc9989545-j7zzt"] Dec 10 13:02:08 crc kubenswrapper[4921]: I1210 13:02:08.845778 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fcb3ec6d-74ad-45a8-b325-a44ab5b212a3-config\") pod \"controller-manager-6bc9989545-j7zzt\" (UID: \"fcb3ec6d-74ad-45a8-b325-a44ab5b212a3\") " pod="openshift-controller-manager/controller-manager-6bc9989545-j7zzt" Dec 10 13:02:08 crc kubenswrapper[4921]: I1210 13:02:08.845899 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/fcb3ec6d-74ad-45a8-b325-a44ab5b212a3-proxy-ca-bundles\") pod \"controller-manager-6bc9989545-j7zzt\" (UID: \"fcb3ec6d-74ad-45a8-b325-a44ab5b212a3\") " pod="openshift-controller-manager/controller-manager-6bc9989545-j7zzt" Dec 10 13:02:08 crc kubenswrapper[4921]: I1210 13:02:08.845932 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fcb3ec6d-74ad-45a8-b325-a44ab5b212a3-serving-cert\") pod \"controller-manager-6bc9989545-j7zzt\" (UID: \"fcb3ec6d-74ad-45a8-b325-a44ab5b212a3\") " pod="openshift-controller-manager/controller-manager-6bc9989545-j7zzt" Dec 10 13:02:08 crc kubenswrapper[4921]: I1210 13:02:08.846051 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pf67j\" (UniqueName: \"kubernetes.io/projected/fcb3ec6d-74ad-45a8-b325-a44ab5b212a3-kube-api-access-pf67j\") pod \"controller-manager-6bc9989545-j7zzt\" (UID: \"fcb3ec6d-74ad-45a8-b325-a44ab5b212a3\") " pod="openshift-controller-manager/controller-manager-6bc9989545-j7zzt" Dec 10 13:02:08 crc kubenswrapper[4921]: I1210 13:02:08.846074 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: 
\"kubernetes.io/configmap/fcb3ec6d-74ad-45a8-b325-a44ab5b212a3-client-ca\") pod \"controller-manager-6bc9989545-j7zzt\" (UID: \"fcb3ec6d-74ad-45a8-b325-a44ab5b212a3\") " pod="openshift-controller-manager/controller-manager-6bc9989545-j7zzt" Dec 10 13:02:08 crc kubenswrapper[4921]: I1210 13:02:08.947694 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/fcb3ec6d-74ad-45a8-b325-a44ab5b212a3-proxy-ca-bundles\") pod \"controller-manager-6bc9989545-j7zzt\" (UID: \"fcb3ec6d-74ad-45a8-b325-a44ab5b212a3\") " pod="openshift-controller-manager/controller-manager-6bc9989545-j7zzt" Dec 10 13:02:08 crc kubenswrapper[4921]: I1210 13:02:08.947753 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fcb3ec6d-74ad-45a8-b325-a44ab5b212a3-serving-cert\") pod \"controller-manager-6bc9989545-j7zzt\" (UID: \"fcb3ec6d-74ad-45a8-b325-a44ab5b212a3\") " pod="openshift-controller-manager/controller-manager-6bc9989545-j7zzt" Dec 10 13:02:08 crc kubenswrapper[4921]: I1210 13:02:08.947803 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pf67j\" (UniqueName: \"kubernetes.io/projected/fcb3ec6d-74ad-45a8-b325-a44ab5b212a3-kube-api-access-pf67j\") pod \"controller-manager-6bc9989545-j7zzt\" (UID: \"fcb3ec6d-74ad-45a8-b325-a44ab5b212a3\") " pod="openshift-controller-manager/controller-manager-6bc9989545-j7zzt" Dec 10 13:02:08 crc kubenswrapper[4921]: I1210 13:02:08.947829 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/fcb3ec6d-74ad-45a8-b325-a44ab5b212a3-client-ca\") pod \"controller-manager-6bc9989545-j7zzt\" (UID: \"fcb3ec6d-74ad-45a8-b325-a44ab5b212a3\") " pod="openshift-controller-manager/controller-manager-6bc9989545-j7zzt" Dec 10 13:02:08 crc kubenswrapper[4921]: I1210 13:02:08.947877 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fcb3ec6d-74ad-45a8-b325-a44ab5b212a3-config\") pod \"controller-manager-6bc9989545-j7zzt\" (UID: \"fcb3ec6d-74ad-45a8-b325-a44ab5b212a3\") " pod="openshift-controller-manager/controller-manager-6bc9989545-j7zzt" Dec 10 13:02:08 crc kubenswrapper[4921]: I1210 13:02:08.949051 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/fcb3ec6d-74ad-45a8-b325-a44ab5b212a3-proxy-ca-bundles\") pod \"controller-manager-6bc9989545-j7zzt\" (UID: \"fcb3ec6d-74ad-45a8-b325-a44ab5b212a3\") " pod="openshift-controller-manager/controller-manager-6bc9989545-j7zzt" Dec 10 13:02:08 crc kubenswrapper[4921]: I1210 13:02:08.949333 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fcb3ec6d-74ad-45a8-b325-a44ab5b212a3-config\") pod \"controller-manager-6bc9989545-j7zzt\" (UID: \"fcb3ec6d-74ad-45a8-b325-a44ab5b212a3\") " pod="openshift-controller-manager/controller-manager-6bc9989545-j7zzt" Dec 10 13:02:08 crc kubenswrapper[4921]: I1210 13:02:08.949442 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/fcb3ec6d-74ad-45a8-b325-a44ab5b212a3-client-ca\") pod \"controller-manager-6bc9989545-j7zzt\" (UID: \"fcb3ec6d-74ad-45a8-b325-a44ab5b212a3\") " 
pod="openshift-controller-manager/controller-manager-6bc9989545-j7zzt" Dec 10 13:02:08 crc kubenswrapper[4921]: I1210 13:02:08.953672 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fcb3ec6d-74ad-45a8-b325-a44ab5b212a3-serving-cert\") pod \"controller-manager-6bc9989545-j7zzt\" (UID: \"fcb3ec6d-74ad-45a8-b325-a44ab5b212a3\") " pod="openshift-controller-manager/controller-manager-6bc9989545-j7zzt" Dec 10 13:02:08 crc kubenswrapper[4921]: I1210 13:02:08.966200 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pf67j\" (UniqueName: \"kubernetes.io/projected/fcb3ec6d-74ad-45a8-b325-a44ab5b212a3-kube-api-access-pf67j\") pod \"controller-manager-6bc9989545-j7zzt\" (UID: \"fcb3ec6d-74ad-45a8-b325-a44ab5b212a3\") " pod="openshift-controller-manager/controller-manager-6bc9989545-j7zzt" Dec 10 13:02:09 crc kubenswrapper[4921]: I1210 13:02:09.035433 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-6bc9989545-j7zzt" Dec 10 13:02:09 crc kubenswrapper[4921]: I1210 13:02:09.203063 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2991ea41-8180-4b67-9ebc-c206b737da6d" path="/var/lib/kubelet/pods/2991ea41-8180-4b67-9ebc-c206b737da6d/volumes" Dec 10 13:02:09 crc kubenswrapper[4921]: I1210 13:02:09.253153 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-6bc9989545-j7zzt"] Dec 10 13:02:10 crc kubenswrapper[4921]: I1210 13:02:10.205666 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6bc9989545-j7zzt" event={"ID":"fcb3ec6d-74ad-45a8-b325-a44ab5b212a3","Type":"ContainerStarted","Data":"665a57d13d5e2a10d097083b3fdce76ef3a3bc1710c3b8330c150392b3c2bef3"} Dec 10 13:02:10 crc kubenswrapper[4921]: I1210 13:02:10.206028 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6bc9989545-j7zzt" event={"ID":"fcb3ec6d-74ad-45a8-b325-a44ab5b212a3","Type":"ContainerStarted","Data":"70d98ed05e77b7bd57164b5b90e1da30ad5c03d2094a4cd7307d141224c0fb44"} Dec 10 13:02:10 crc kubenswrapper[4921]: I1210 13:02:10.206059 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-6bc9989545-j7zzt" Dec 10 13:02:10 crc kubenswrapper[4921]: I1210 13:02:10.210833 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-6bc9989545-j7zzt" Dec 10 13:02:10 crc kubenswrapper[4921]: I1210 13:02:10.226459 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-6bc9989545-j7zzt" podStartSLOduration=3.226439938 podStartE2EDuration="3.226439938s" podCreationTimestamp="2025-12-10 13:02:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 13:02:10.224860015 +0000 UTC m=+327.441081949" watchObservedRunningTime="2025-12-10 13:02:10.226439938 +0000 UTC m=+327.442661852" Dec 10 13:02:16 crc kubenswrapper[4921]: I1210 13:02:16.711419 4921 patch_prober.go:28] interesting pod/machine-config-daemon-vn2n6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: 
connect: connection refused" start-of-body= Dec 10 13:02:16 crc kubenswrapper[4921]: I1210 13:02:16.711952 4921 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" podUID="354355f7-6630-49a8-bdc5-5e875feecb7f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 10 13:02:22 crc kubenswrapper[4921]: I1210 13:02:22.663148 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-7wmbk" Dec 10 13:02:22 crc kubenswrapper[4921]: I1210 13:02:22.724736 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-556st"] Dec 10 13:02:46 crc kubenswrapper[4921]: I1210 13:02:46.711190 4921 patch_prober.go:28] interesting pod/machine-config-daemon-vn2n6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 10 13:02:46 crc kubenswrapper[4921]: I1210 13:02:46.711819 4921 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" podUID="354355f7-6630-49a8-bdc5-5e875feecb7f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 10 13:02:47 crc kubenswrapper[4921]: I1210 13:02:47.775529 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-556st" podUID="ae0b7e23-ca18-4adc-aa3a-551c273d45af" containerName="registry" containerID="cri-o://d500b473e93ca1215c34f918306aaa6bc11ebb4e159112579321453bebbfbb98" gracePeriod=30 Dec 10 13:02:48 crc kubenswrapper[4921]: I1210 13:02:48.135197 4921 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-556st" Dec 10 13:02:48 crc kubenswrapper[4921]: I1210 13:02:48.201187 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"ae0b7e23-ca18-4adc-aa3a-551c273d45af\" (UID: \"ae0b7e23-ca18-4adc-aa3a-551c273d45af\") " Dec 10 13:02:48 crc kubenswrapper[4921]: I1210 13:02:48.201244 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/ae0b7e23-ca18-4adc-aa3a-551c273d45af-ca-trust-extracted\") pod \"ae0b7e23-ca18-4adc-aa3a-551c273d45af\" (UID: \"ae0b7e23-ca18-4adc-aa3a-551c273d45af\") " Dec 10 13:02:48 crc kubenswrapper[4921]: I1210 13:02:48.201274 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tg59x\" (UniqueName: \"kubernetes.io/projected/ae0b7e23-ca18-4adc-aa3a-551c273d45af-kube-api-access-tg59x\") pod \"ae0b7e23-ca18-4adc-aa3a-551c273d45af\" (UID: \"ae0b7e23-ca18-4adc-aa3a-551c273d45af\") " Dec 10 13:02:48 crc kubenswrapper[4921]: I1210 13:02:48.201295 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/ae0b7e23-ca18-4adc-aa3a-551c273d45af-bound-sa-token\") pod \"ae0b7e23-ca18-4adc-aa3a-551c273d45af\" (UID: \"ae0b7e23-ca18-4adc-aa3a-551c273d45af\") " Dec 10 13:02:48 crc kubenswrapper[4921]: I1210 13:02:48.211547 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ae0b7e23-ca18-4adc-aa3a-551c273d45af-kube-api-access-tg59x" (OuterVolumeSpecName: "kube-api-access-tg59x") pod "ae0b7e23-ca18-4adc-aa3a-551c273d45af" (UID: "ae0b7e23-ca18-4adc-aa3a-551c273d45af"). InnerVolumeSpecName "kube-api-access-tg59x". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 13:02:48 crc kubenswrapper[4921]: I1210 13:02:48.215199 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ae0b7e23-ca18-4adc-aa3a-551c273d45af-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "ae0b7e23-ca18-4adc-aa3a-551c273d45af" (UID: "ae0b7e23-ca18-4adc-aa3a-551c273d45af"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 13:02:48 crc kubenswrapper[4921]: I1210 13:02:48.222683 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "ae0b7e23-ca18-4adc-aa3a-551c273d45af" (UID: "ae0b7e23-ca18-4adc-aa3a-551c273d45af"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 10 13:02:48 crc kubenswrapper[4921]: I1210 13:02:48.228366 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ae0b7e23-ca18-4adc-aa3a-551c273d45af-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "ae0b7e23-ca18-4adc-aa3a-551c273d45af" (UID: "ae0b7e23-ca18-4adc-aa3a-551c273d45af"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 13:02:48 crc kubenswrapper[4921]: I1210 13:02:48.302213 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/ae0b7e23-ca18-4adc-aa3a-551c273d45af-registry-tls\") pod \"ae0b7e23-ca18-4adc-aa3a-551c273d45af\" (UID: \"ae0b7e23-ca18-4adc-aa3a-551c273d45af\") " Dec 10 13:02:48 crc kubenswrapper[4921]: I1210 13:02:48.302377 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/ae0b7e23-ca18-4adc-aa3a-551c273d45af-registry-certificates\") pod \"ae0b7e23-ca18-4adc-aa3a-551c273d45af\" (UID: \"ae0b7e23-ca18-4adc-aa3a-551c273d45af\") " Dec 10 13:02:48 crc kubenswrapper[4921]: I1210 13:02:48.302710 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ae0b7e23-ca18-4adc-aa3a-551c273d45af-trusted-ca\") pod \"ae0b7e23-ca18-4adc-aa3a-551c273d45af\" (UID: \"ae0b7e23-ca18-4adc-aa3a-551c273d45af\") " Dec 10 13:02:48 crc kubenswrapper[4921]: I1210 13:02:48.303296 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/ae0b7e23-ca18-4adc-aa3a-551c273d45af-installation-pull-secrets\") pod \"ae0b7e23-ca18-4adc-aa3a-551c273d45af\" (UID: \"ae0b7e23-ca18-4adc-aa3a-551c273d45af\") " Dec 10 13:02:48 crc kubenswrapper[4921]: I1210 13:02:48.303251 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ae0b7e23-ca18-4adc-aa3a-551c273d45af-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "ae0b7e23-ca18-4adc-aa3a-551c273d45af" (UID: "ae0b7e23-ca18-4adc-aa3a-551c273d45af"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 13:02:48 crc kubenswrapper[4921]: I1210 13:02:48.303318 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ae0b7e23-ca18-4adc-aa3a-551c273d45af-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "ae0b7e23-ca18-4adc-aa3a-551c273d45af" (UID: "ae0b7e23-ca18-4adc-aa3a-551c273d45af"). InnerVolumeSpecName "registry-certificates". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 13:02:48 crc kubenswrapper[4921]: I1210 13:02:48.303769 4921 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/ae0b7e23-ca18-4adc-aa3a-551c273d45af-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Dec 10 13:02:48 crc kubenswrapper[4921]: I1210 13:02:48.303785 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tg59x\" (UniqueName: \"kubernetes.io/projected/ae0b7e23-ca18-4adc-aa3a-551c273d45af-kube-api-access-tg59x\") on node \"crc\" DevicePath \"\"" Dec 10 13:02:48 crc kubenswrapper[4921]: I1210 13:02:48.303799 4921 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/ae0b7e23-ca18-4adc-aa3a-551c273d45af-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 10 13:02:48 crc kubenswrapper[4921]: I1210 13:02:48.303807 4921 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/ae0b7e23-ca18-4adc-aa3a-551c273d45af-registry-certificates\") on node \"crc\" DevicePath \"\"" Dec 10 13:02:48 crc kubenswrapper[4921]: I1210 13:02:48.303815 4921 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ae0b7e23-ca18-4adc-aa3a-551c273d45af-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 10 13:02:48 crc kubenswrapper[4921]: I1210 13:02:48.305914 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ae0b7e23-ca18-4adc-aa3a-551c273d45af-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "ae0b7e23-ca18-4adc-aa3a-551c273d45af" (UID: "ae0b7e23-ca18-4adc-aa3a-551c273d45af"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:02:48 crc kubenswrapper[4921]: I1210 13:02:48.305918 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ae0b7e23-ca18-4adc-aa3a-551c273d45af-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "ae0b7e23-ca18-4adc-aa3a-551c273d45af" (UID: "ae0b7e23-ca18-4adc-aa3a-551c273d45af"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 13:02:48 crc kubenswrapper[4921]: I1210 13:02:48.403849 4921 generic.go:334] "Generic (PLEG): container finished" podID="ae0b7e23-ca18-4adc-aa3a-551c273d45af" containerID="d500b473e93ca1215c34f918306aaa6bc11ebb4e159112579321453bebbfbb98" exitCode=0 Dec 10 13:02:48 crc kubenswrapper[4921]: I1210 13:02:48.403924 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-556st" event={"ID":"ae0b7e23-ca18-4adc-aa3a-551c273d45af","Type":"ContainerDied","Data":"d500b473e93ca1215c34f918306aaa6bc11ebb4e159112579321453bebbfbb98"} Dec 10 13:02:48 crc kubenswrapper[4921]: I1210 13:02:48.403973 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-556st" event={"ID":"ae0b7e23-ca18-4adc-aa3a-551c273d45af","Type":"ContainerDied","Data":"b3dff19ef12d78ee380f2f4bafff2fde5db273d6d124ba07cbc9a3d2014d5647"} Dec 10 13:02:48 crc kubenswrapper[4921]: I1210 13:02:48.404003 4921 scope.go:117] "RemoveContainer" containerID="d500b473e93ca1215c34f918306aaa6bc11ebb4e159112579321453bebbfbb98" Dec 10 13:02:48 crc kubenswrapper[4921]: I1210 13:02:48.404175 4921 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-556st" Dec 10 13:02:48 crc kubenswrapper[4921]: I1210 13:02:48.404882 4921 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/ae0b7e23-ca18-4adc-aa3a-551c273d45af-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Dec 10 13:02:48 crc kubenswrapper[4921]: I1210 13:02:48.404924 4921 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/ae0b7e23-ca18-4adc-aa3a-551c273d45af-registry-tls\") on node \"crc\" DevicePath \"\"" Dec 10 13:02:48 crc kubenswrapper[4921]: I1210 13:02:48.427508 4921 scope.go:117] "RemoveContainer" containerID="d500b473e93ca1215c34f918306aaa6bc11ebb4e159112579321453bebbfbb98" Dec 10 13:02:48 crc kubenswrapper[4921]: E1210 13:02:48.427961 4921 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d500b473e93ca1215c34f918306aaa6bc11ebb4e159112579321453bebbfbb98\": container with ID starting with d500b473e93ca1215c34f918306aaa6bc11ebb4e159112579321453bebbfbb98 not found: ID does not exist" containerID="d500b473e93ca1215c34f918306aaa6bc11ebb4e159112579321453bebbfbb98" Dec 10 13:02:48 crc kubenswrapper[4921]: I1210 13:02:48.428017 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d500b473e93ca1215c34f918306aaa6bc11ebb4e159112579321453bebbfbb98"} err="failed to get container status \"d500b473e93ca1215c34f918306aaa6bc11ebb4e159112579321453bebbfbb98\": rpc error: code = NotFound desc = could not find container \"d500b473e93ca1215c34f918306aaa6bc11ebb4e159112579321453bebbfbb98\": container with ID starting with d500b473e93ca1215c34f918306aaa6bc11ebb4e159112579321453bebbfbb98 not found: ID does not exist" Dec 10 13:02:48 crc kubenswrapper[4921]: I1210 13:02:48.445457 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-556st"] Dec 10 13:02:48 crc kubenswrapper[4921]: I1210 13:02:48.448687 4921 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-556st"] Dec 10 13:02:49 crc kubenswrapper[4921]: I1210 13:02:49.200071 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ae0b7e23-ca18-4adc-aa3a-551c273d45af" path="/var/lib/kubelet/pods/ae0b7e23-ca18-4adc-aa3a-551c273d45af/volumes" Dec 10 13:03:16 crc kubenswrapper[4921]: I1210 13:03:16.711170 4921 patch_prober.go:28] interesting pod/machine-config-daemon-vn2n6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 10 13:03:16 crc kubenswrapper[4921]: I1210 13:03:16.711686 4921 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" podUID="354355f7-6630-49a8-bdc5-5e875feecb7f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 10 13:03:16 crc kubenswrapper[4921]: I1210 13:03:16.711735 4921 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" Dec 10 13:03:16 crc kubenswrapper[4921]: I1210 13:03:16.712278 4921 kuberuntime_manager.go:1027] "Message for 
Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"d291bc9e31f368d19d6f0d518943c401b76d569a551a18fee6a5eac2aeea537d"} pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 10 13:03:16 crc kubenswrapper[4921]: I1210 13:03:16.712328 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" podUID="354355f7-6630-49a8-bdc5-5e875feecb7f" containerName="machine-config-daemon" containerID="cri-o://d291bc9e31f368d19d6f0d518943c401b76d569a551a18fee6a5eac2aeea537d" gracePeriod=600 Dec 10 13:03:17 crc kubenswrapper[4921]: I1210 13:03:17.561917 4921 generic.go:334] "Generic (PLEG): container finished" podID="354355f7-6630-49a8-bdc5-5e875feecb7f" containerID="d291bc9e31f368d19d6f0d518943c401b76d569a551a18fee6a5eac2aeea537d" exitCode=0 Dec 10 13:03:17 crc kubenswrapper[4921]: I1210 13:03:17.562042 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" event={"ID":"354355f7-6630-49a8-bdc5-5e875feecb7f","Type":"ContainerDied","Data":"d291bc9e31f368d19d6f0d518943c401b76d569a551a18fee6a5eac2aeea537d"} Dec 10 13:03:17 crc kubenswrapper[4921]: I1210 13:03:17.562573 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" event={"ID":"354355f7-6630-49a8-bdc5-5e875feecb7f","Type":"ContainerStarted","Data":"65cabfaddf4e4a7b2b469a25f75e10271c9f98df50571e3320da2cebc7ca5d27"} Dec 10 13:03:17 crc kubenswrapper[4921]: I1210 13:03:17.562604 4921 scope.go:117] "RemoveContainer" containerID="27975eaa70887a1e6ec3bc21ce170bbe5dfe5a05172264be8c8bd343aea02998" Dec 10 13:05:16 crc kubenswrapper[4921]: I1210 13:05:16.710507 4921 patch_prober.go:28] interesting pod/machine-config-daemon-vn2n6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 10 13:05:16 crc kubenswrapper[4921]: I1210 13:05:16.711094 4921 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" podUID="354355f7-6630-49a8-bdc5-5e875feecb7f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 10 13:05:46 crc kubenswrapper[4921]: I1210 13:05:46.711485 4921 patch_prober.go:28] interesting pod/machine-config-daemon-vn2n6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 10 13:05:46 crc kubenswrapper[4921]: I1210 13:05:46.712116 4921 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" podUID="354355f7-6630-49a8-bdc5-5e875feecb7f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 10 13:06:08 crc kubenswrapper[4921]: I1210 13:06:08.052062 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-mwqs8"] Dec 10 13:06:08 crc kubenswrapper[4921]: E1210 
13:06:08.053872 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ae0b7e23-ca18-4adc-aa3a-551c273d45af" containerName="registry" Dec 10 13:06:08 crc kubenswrapper[4921]: I1210 13:06:08.053985 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="ae0b7e23-ca18-4adc-aa3a-551c273d45af" containerName="registry" Dec 10 13:06:08 crc kubenswrapper[4921]: I1210 13:06:08.054198 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="ae0b7e23-ca18-4adc-aa3a-551c273d45af" containerName="registry" Dec 10 13:06:08 crc kubenswrapper[4921]: I1210 13:06:08.054724 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-mwqs8" Dec 10 13:06:08 crc kubenswrapper[4921]: I1210 13:06:08.058161 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-5b446d88c5-cxncf"] Dec 10 13:06:08 crc kubenswrapper[4921]: I1210 13:06:08.058937 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-cxncf" Dec 10 13:06:08 crc kubenswrapper[4921]: I1210 13:06:08.067119 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Dec 10 13:06:08 crc kubenswrapper[4921]: I1210 13:06:08.067119 4921 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-crrq6" Dec 10 13:06:08 crc kubenswrapper[4921]: I1210 13:06:08.067349 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Dec 10 13:06:08 crc kubenswrapper[4921]: I1210 13:06:08.067434 4921 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-db6lc" Dec 10 13:06:08 crc kubenswrapper[4921]: I1210 13:06:08.071089 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-mwqs8"] Dec 10 13:06:08 crc kubenswrapper[4921]: I1210 13:06:08.079086 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-bl74c"] Dec 10 13:06:08 crc kubenswrapper[4921]: I1210 13:06:08.079870 4921 util.go:30] "No sandbox for pod can be found. 
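
The cpu_manager / state_mem / memory_manager triplet above fires when new pods (here the cert-manager trio) are admitted and the resource managers garbage-collect bookkeeping left behind by containers that no longer exist, such as the deleted registry pod's "registry" container. A minimal sketch of that idea follows; the map layout is an assumption for illustration, not the managers' real state format.

    package main

    import "fmt"

    type key struct{ podUID, container string }

    // removeStaleState drops assignments belonging to pods that are no
    // longer active, echoing the "RemoveStaleState: removing container" and
    // "Deleted CPUSet assignment" lines in the log.
    func removeStaleState(assignments map[key]string, active map[string]bool) {
        for k := range assignments {
            if !active[k.podUID] {
                fmt.Printf("removing stale state for pod %s container %s\n", k.podUID, k.container)
                delete(assignments, k) // deleting during range is safe in Go
            }
        }
    }

    func main() {
        assignments := map[key]string{
            {podUID: "ae0b7e23-ca18-4adc-aa3a-551c273d45af", container: "registry"}: "cpuset 0-3",
        }
        removeStaleState(assignments, map[string]bool{}) // deleted pod is absent
    }
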
Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-bl74c" Dec 10 13:06:08 crc kubenswrapper[4921]: I1210 13:06:08.083585 4921 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-z52jw" Dec 10 13:06:08 crc kubenswrapper[4921]: I1210 13:06:08.086519 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wbz5p\" (UniqueName: \"kubernetes.io/projected/edd2568d-f1f7-4327-8e90-769d0598adca-kube-api-access-wbz5p\") pod \"cert-manager-cainjector-7f985d654d-mwqs8\" (UID: \"edd2568d-f1f7-4327-8e90-769d0598adca\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-mwqs8" Dec 10 13:06:08 crc kubenswrapper[4921]: I1210 13:06:08.086649 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7nn5j\" (UniqueName: \"kubernetes.io/projected/92834ade-8e0b-4bc4-82e4-7ca664e21681-kube-api-access-7nn5j\") pod \"cert-manager-5b446d88c5-cxncf\" (UID: \"92834ade-8e0b-4bc4-82e4-7ca664e21681\") " pod="cert-manager/cert-manager-5b446d88c5-cxncf" Dec 10 13:06:08 crc kubenswrapper[4921]: I1210 13:06:08.096343 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-cxncf"] Dec 10 13:06:08 crc kubenswrapper[4921]: I1210 13:06:08.107359 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-bl74c"] Dec 10 13:06:08 crc kubenswrapper[4921]: I1210 13:06:08.188265 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7nn5j\" (UniqueName: \"kubernetes.io/projected/92834ade-8e0b-4bc4-82e4-7ca664e21681-kube-api-access-7nn5j\") pod \"cert-manager-5b446d88c5-cxncf\" (UID: \"92834ade-8e0b-4bc4-82e4-7ca664e21681\") " pod="cert-manager/cert-manager-5b446d88c5-cxncf" Dec 10 13:06:08 crc kubenswrapper[4921]: I1210 13:06:08.188356 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wbz5p\" (UniqueName: \"kubernetes.io/projected/edd2568d-f1f7-4327-8e90-769d0598adca-kube-api-access-wbz5p\") pod \"cert-manager-cainjector-7f985d654d-mwqs8\" (UID: \"edd2568d-f1f7-4327-8e90-769d0598adca\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-mwqs8" Dec 10 13:06:08 crc kubenswrapper[4921]: I1210 13:06:08.188417 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fhsj2\" (UniqueName: \"kubernetes.io/projected/e4f33fbe-8180-499a-812d-8473d0178c72-kube-api-access-fhsj2\") pod \"cert-manager-webhook-5655c58dd6-bl74c\" (UID: \"e4f33fbe-8180-499a-812d-8473d0178c72\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-bl74c" Dec 10 13:06:08 crc kubenswrapper[4921]: I1210 13:06:08.207264 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7nn5j\" (UniqueName: \"kubernetes.io/projected/92834ade-8e0b-4bc4-82e4-7ca664e21681-kube-api-access-7nn5j\") pod \"cert-manager-5b446d88c5-cxncf\" (UID: \"92834ade-8e0b-4bc4-82e4-7ca664e21681\") " pod="cert-manager/cert-manager-5b446d88c5-cxncf" Dec 10 13:06:08 crc kubenswrapper[4921]: I1210 13:06:08.207845 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wbz5p\" (UniqueName: \"kubernetes.io/projected/edd2568d-f1f7-4327-8e90-769d0598adca-kube-api-access-wbz5p\") pod \"cert-manager-cainjector-7f985d654d-mwqs8\" (UID: \"edd2568d-f1f7-4327-8e90-769d0598adca\") " 
pod="cert-manager/cert-manager-cainjector-7f985d654d-mwqs8" Dec 10 13:06:08 crc kubenswrapper[4921]: I1210 13:06:08.289639 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fhsj2\" (UniqueName: \"kubernetes.io/projected/e4f33fbe-8180-499a-812d-8473d0178c72-kube-api-access-fhsj2\") pod \"cert-manager-webhook-5655c58dd6-bl74c\" (UID: \"e4f33fbe-8180-499a-812d-8473d0178c72\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-bl74c" Dec 10 13:06:08 crc kubenswrapper[4921]: I1210 13:06:08.306732 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fhsj2\" (UniqueName: \"kubernetes.io/projected/e4f33fbe-8180-499a-812d-8473d0178c72-kube-api-access-fhsj2\") pod \"cert-manager-webhook-5655c58dd6-bl74c\" (UID: \"e4f33fbe-8180-499a-812d-8473d0178c72\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-bl74c" Dec 10 13:06:08 crc kubenswrapper[4921]: I1210 13:06:08.374319 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-mwqs8" Dec 10 13:06:08 crc kubenswrapper[4921]: I1210 13:06:08.379660 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-cxncf" Dec 10 13:06:08 crc kubenswrapper[4921]: I1210 13:06:08.394653 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-bl74c" Dec 10 13:06:08 crc kubenswrapper[4921]: I1210 13:06:08.616553 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-mwqs8"] Dec 10 13:06:08 crc kubenswrapper[4921]: I1210 13:06:08.625543 4921 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 10 13:06:08 crc kubenswrapper[4921]: I1210 13:06:08.647764 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-cxncf"] Dec 10 13:06:08 crc kubenswrapper[4921]: I1210 13:06:08.685593 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-bl74c"] Dec 10 13:06:08 crc kubenswrapper[4921]: W1210 13:06:08.688768 4921 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode4f33fbe_8180_499a_812d_8473d0178c72.slice/crio-582e1180bf782789cb41b482ba27c0102bac60c501c957e65b42c2ac25da1cec WatchSource:0}: Error finding container 582e1180bf782789cb41b482ba27c0102bac60c501c957e65b42c2ac25da1cec: Status 404 returned error can't find the container with id 582e1180bf782789cb41b482ba27c0102bac60c501c957e65b42c2ac25da1cec Dec 10 13:06:09 crc kubenswrapper[4921]: I1210 13:06:09.181791 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-mwqs8" event={"ID":"edd2568d-f1f7-4327-8e90-769d0598adca","Type":"ContainerStarted","Data":"01813b7966b0a9db15a0fcf1cbd195f33990d7d5f168be2b181cdc66b8711abb"} Dec 10 13:06:09 crc kubenswrapper[4921]: I1210 13:06:09.183421 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-bl74c" event={"ID":"e4f33fbe-8180-499a-812d-8473d0178c72","Type":"ContainerStarted","Data":"582e1180bf782789cb41b482ba27c0102bac60c501c957e65b42c2ac25da1cec"} Dec 10 13:06:09 crc kubenswrapper[4921]: I1210 13:06:09.184217 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-cxncf" 
event={"ID":"92834ade-8e0b-4bc4-82e4-7ca664e21681","Type":"ContainerStarted","Data":"befeebc4e47e29f49685d0474017047bc83e72fb100e61efa30066770210513f"} Dec 10 13:06:12 crc kubenswrapper[4921]: I1210 13:06:12.198701 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-cxncf" event={"ID":"92834ade-8e0b-4bc4-82e4-7ca664e21681","Type":"ContainerStarted","Data":"76016a6a1ded9f2b8721430f90dffeb75360c9452b16d410284c512b9fe56e5d"} Dec 10 13:06:12 crc kubenswrapper[4921]: I1210 13:06:12.202510 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-mwqs8" event={"ID":"edd2568d-f1f7-4327-8e90-769d0598adca","Type":"ContainerStarted","Data":"b0514e0b3c91d5d15fa28aa150b4da300b5a1e80a824ca341bcf0d66e3e182bf"} Dec 10 13:06:12 crc kubenswrapper[4921]: I1210 13:06:12.216307 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-5b446d88c5-cxncf" podStartSLOduration=1.574994362 podStartE2EDuration="4.216292887s" podCreationTimestamp="2025-12-10 13:06:08 +0000 UTC" firstStartedPulling="2025-12-10 13:06:08.660597437 +0000 UTC m=+565.876819361" lastFinishedPulling="2025-12-10 13:06:11.301895962 +0000 UTC m=+568.518117886" observedRunningTime="2025-12-10 13:06:12.21344807 +0000 UTC m=+569.429669994" watchObservedRunningTime="2025-12-10 13:06:12.216292887 +0000 UTC m=+569.432514811" Dec 10 13:06:12 crc kubenswrapper[4921]: I1210 13:06:12.236181 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-7f985d654d-mwqs8" podStartSLOduration=1.556841231 podStartE2EDuration="4.236166214s" podCreationTimestamp="2025-12-10 13:06:08 +0000 UTC" firstStartedPulling="2025-12-10 13:06:08.62518848 +0000 UTC m=+565.841410404" lastFinishedPulling="2025-12-10 13:06:11.304513453 +0000 UTC m=+568.520735387" observedRunningTime="2025-12-10 13:06:12.23563129 +0000 UTC m=+569.451853224" watchObservedRunningTime="2025-12-10 13:06:12.236166214 +0000 UTC m=+569.452388138" Dec 10 13:06:13 crc kubenswrapper[4921]: I1210 13:06:13.208853 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-bl74c" event={"ID":"e4f33fbe-8180-499a-812d-8473d0178c72","Type":"ContainerStarted","Data":"c9ca98c33e0c4742908bc02782523d0702bf438bba0d9988d0a64faaede4c6cf"} Dec 10 13:06:13 crc kubenswrapper[4921]: I1210 13:06:13.209152 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-5655c58dd6-bl74c" Dec 10 13:06:13 crc kubenswrapper[4921]: I1210 13:06:13.231908 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-5655c58dd6-bl74c" podStartSLOduration=1.415177946 podStartE2EDuration="5.231888696s" podCreationTimestamp="2025-12-10 13:06:08 +0000 UTC" firstStartedPulling="2025-12-10 13:06:08.693155736 +0000 UTC m=+565.909377660" lastFinishedPulling="2025-12-10 13:06:12.509866486 +0000 UTC m=+569.726088410" observedRunningTime="2025-12-10 13:06:13.228563656 +0000 UTC m=+570.444785580" watchObservedRunningTime="2025-12-10 13:06:13.231888696 +0000 UTC m=+570.448110630" Dec 10 13:06:16 crc kubenswrapper[4921]: I1210 13:06:16.711324 4921 patch_prober.go:28] interesting pod/machine-config-daemon-vn2n6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" 
start-of-body= Dec 10 13:06:16 crc kubenswrapper[4921]: I1210 13:06:16.711431 4921 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" podUID="354355f7-6630-49a8-bdc5-5e875feecb7f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 10 13:06:16 crc kubenswrapper[4921]: I1210 13:06:16.711484 4921 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" Dec 10 13:06:16 crc kubenswrapper[4921]: I1210 13:06:16.712099 4921 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"65cabfaddf4e4a7b2b469a25f75e10271c9f98df50571e3320da2cebc7ca5d27"} pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 10 13:06:16 crc kubenswrapper[4921]: I1210 13:06:16.712166 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" podUID="354355f7-6630-49a8-bdc5-5e875feecb7f" containerName="machine-config-daemon" containerID="cri-o://65cabfaddf4e4a7b2b469a25f75e10271c9f98df50571e3320da2cebc7ca5d27" gracePeriod=600 Dec 10 13:06:17 crc kubenswrapper[4921]: I1210 13:06:17.229558 4921 generic.go:334] "Generic (PLEG): container finished" podID="354355f7-6630-49a8-bdc5-5e875feecb7f" containerID="65cabfaddf4e4a7b2b469a25f75e10271c9f98df50571e3320da2cebc7ca5d27" exitCode=0 Dec 10 13:06:17 crc kubenswrapper[4921]: I1210 13:06:17.229622 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" event={"ID":"354355f7-6630-49a8-bdc5-5e875feecb7f","Type":"ContainerDied","Data":"65cabfaddf4e4a7b2b469a25f75e10271c9f98df50571e3320da2cebc7ca5d27"} Dec 10 13:06:17 crc kubenswrapper[4921]: I1210 13:06:17.229841 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" event={"ID":"354355f7-6630-49a8-bdc5-5e875feecb7f","Type":"ContainerStarted","Data":"14289058d0fb041d586a9216e5a19d1f702167fc4a4034c67755ae206f4ba1ec"} Dec 10 13:06:17 crc kubenswrapper[4921]: I1210 13:06:17.229864 4921 scope.go:117] "RemoveContainer" containerID="d291bc9e31f368d19d6f0d518943c401b76d569a551a18fee6a5eac2aeea537d" Dec 10 13:06:18 crc kubenswrapper[4921]: I1210 13:06:18.397752 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-5655c58dd6-bl74c" Dec 10 13:06:18 crc kubenswrapper[4921]: I1210 13:06:18.509110 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-m7n89"] Dec 10 13:06:18 crc kubenswrapper[4921]: I1210 13:06:18.509597 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" podUID="50684108-04fc-405c-82be-d21d16cd650b" containerName="ovn-controller" containerID="cri-o://59a0f3962237d723e5aa9044de1ddce3673ae1fb4c9e5e0478cd41daa661f6d7" gracePeriod=30 Dec 10 13:06:18 crc kubenswrapper[4921]: I1210 13:06:18.509770 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" podUID="50684108-04fc-405c-82be-d21d16cd650b" containerName="sbdb" 
containerID="cri-o://dd0025f5be6e68aba73c349dd732281dead920b7d8c2d307b4a67cfdafb99119" gracePeriod=30 Dec 10 13:06:18 crc kubenswrapper[4921]: I1210 13:06:18.509825 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" podUID="50684108-04fc-405c-82be-d21d16cd650b" containerName="nbdb" containerID="cri-o://933c0c81aa0aa2d676a6e404f883a7c81240ef7b07a2e794878c85994d0eb88f" gracePeriod=30 Dec 10 13:06:18 crc kubenswrapper[4921]: I1210 13:06:18.509895 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" podUID="50684108-04fc-405c-82be-d21d16cd650b" containerName="northd" containerID="cri-o://f8f888f214898cb28563da7a77267781622df1f2231c27d1fbdee617ada1ec2e" gracePeriod=30 Dec 10 13:06:18 crc kubenswrapper[4921]: I1210 13:06:18.509939 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" podUID="50684108-04fc-405c-82be-d21d16cd650b" containerName="kube-rbac-proxy-node" containerID="cri-o://8fd269a96475df9dccf2f7bd0ffae831f397f49232f5c22df67903b9b8b8161e" gracePeriod=30 Dec 10 13:06:18 crc kubenswrapper[4921]: I1210 13:06:18.510072 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" podUID="50684108-04fc-405c-82be-d21d16cd650b" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://1a88b1b9101bc4ab339d394df337e4e11ec8af98b44b621bcb84eed1a0fba3d7" gracePeriod=30 Dec 10 13:06:18 crc kubenswrapper[4921]: I1210 13:06:18.509964 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" podUID="50684108-04fc-405c-82be-d21d16cd650b" containerName="ovn-acl-logging" containerID="cri-o://27956424405bdf6223a96b8fd91b5152276a1501c3de2e07dfafc8b3329a6063" gracePeriod=30 Dec 10 13:06:18 crc kubenswrapper[4921]: I1210 13:06:18.555048 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" podUID="50684108-04fc-405c-82be-d21d16cd650b" containerName="ovnkube-controller" containerID="cri-o://45f1cf4017292b2abcad5c57a340cd9c9b14a2bbfeeba7c10e3d0b3bcc0dca5a" gracePeriod=30 Dec 10 13:06:18 crc kubenswrapper[4921]: I1210 13:06:18.869170 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-m7n89_50684108-04fc-405c-82be-d21d16cd650b/ovnkube-controller/2.log" Dec 10 13:06:18 crc kubenswrapper[4921]: I1210 13:06:18.872536 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-m7n89_50684108-04fc-405c-82be-d21d16cd650b/ovn-acl-logging/0.log" Dec 10 13:06:18 crc kubenswrapper[4921]: I1210 13:06:18.872996 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-m7n89_50684108-04fc-405c-82be-d21d16cd650b/ovn-controller/0.log" Dec 10 13:06:18 crc kubenswrapper[4921]: I1210 13:06:18.873754 4921 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" Dec 10 13:06:18 crc kubenswrapper[4921]: I1210 13:06:18.929514 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-5scwb"] Dec 10 13:06:18 crc kubenswrapper[4921]: E1210 13:06:18.929730 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="50684108-04fc-405c-82be-d21d16cd650b" containerName="ovnkube-controller" Dec 10 13:06:18 crc kubenswrapper[4921]: I1210 13:06:18.929749 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="50684108-04fc-405c-82be-d21d16cd650b" containerName="ovnkube-controller" Dec 10 13:06:18 crc kubenswrapper[4921]: E1210 13:06:18.929766 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="50684108-04fc-405c-82be-d21d16cd650b" containerName="sbdb" Dec 10 13:06:18 crc kubenswrapper[4921]: I1210 13:06:18.929775 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="50684108-04fc-405c-82be-d21d16cd650b" containerName="sbdb" Dec 10 13:06:18 crc kubenswrapper[4921]: E1210 13:06:18.929786 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="50684108-04fc-405c-82be-d21d16cd650b" containerName="ovn-controller" Dec 10 13:06:18 crc kubenswrapper[4921]: I1210 13:06:18.929794 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="50684108-04fc-405c-82be-d21d16cd650b" containerName="ovn-controller" Dec 10 13:06:18 crc kubenswrapper[4921]: E1210 13:06:18.929808 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="50684108-04fc-405c-82be-d21d16cd650b" containerName="ovn-acl-logging" Dec 10 13:06:18 crc kubenswrapper[4921]: I1210 13:06:18.929818 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="50684108-04fc-405c-82be-d21d16cd650b" containerName="ovn-acl-logging" Dec 10 13:06:18 crc kubenswrapper[4921]: E1210 13:06:18.929826 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="50684108-04fc-405c-82be-d21d16cd650b" containerName="ovnkube-controller" Dec 10 13:06:18 crc kubenswrapper[4921]: I1210 13:06:18.929836 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="50684108-04fc-405c-82be-d21d16cd650b" containerName="ovnkube-controller" Dec 10 13:06:18 crc kubenswrapper[4921]: E1210 13:06:18.929849 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="50684108-04fc-405c-82be-d21d16cd650b" containerName="kube-rbac-proxy-ovn-metrics" Dec 10 13:06:18 crc kubenswrapper[4921]: I1210 13:06:18.929858 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="50684108-04fc-405c-82be-d21d16cd650b" containerName="kube-rbac-proxy-ovn-metrics" Dec 10 13:06:18 crc kubenswrapper[4921]: E1210 13:06:18.929871 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="50684108-04fc-405c-82be-d21d16cd650b" containerName="ovnkube-controller" Dec 10 13:06:18 crc kubenswrapper[4921]: I1210 13:06:18.929879 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="50684108-04fc-405c-82be-d21d16cd650b" containerName="ovnkube-controller" Dec 10 13:06:18 crc kubenswrapper[4921]: E1210 13:06:18.929890 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="50684108-04fc-405c-82be-d21d16cd650b" containerName="kubecfg-setup" Dec 10 13:06:18 crc kubenswrapper[4921]: I1210 13:06:18.929898 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="50684108-04fc-405c-82be-d21d16cd650b" containerName="kubecfg-setup" Dec 10 13:06:18 crc kubenswrapper[4921]: E1210 13:06:18.929909 4921 cpu_manager.go:410] "RemoveStaleState: removing 
container" podUID="50684108-04fc-405c-82be-d21d16cd650b" containerName="ovnkube-controller" Dec 10 13:06:18 crc kubenswrapper[4921]: I1210 13:06:18.929916 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="50684108-04fc-405c-82be-d21d16cd650b" containerName="ovnkube-controller" Dec 10 13:06:18 crc kubenswrapper[4921]: E1210 13:06:18.929929 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="50684108-04fc-405c-82be-d21d16cd650b" containerName="northd" Dec 10 13:06:18 crc kubenswrapper[4921]: I1210 13:06:18.929936 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="50684108-04fc-405c-82be-d21d16cd650b" containerName="northd" Dec 10 13:06:18 crc kubenswrapper[4921]: E1210 13:06:18.929950 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="50684108-04fc-405c-82be-d21d16cd650b" containerName="nbdb" Dec 10 13:06:18 crc kubenswrapper[4921]: I1210 13:06:18.929958 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="50684108-04fc-405c-82be-d21d16cd650b" containerName="nbdb" Dec 10 13:06:18 crc kubenswrapper[4921]: E1210 13:06:18.929973 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="50684108-04fc-405c-82be-d21d16cd650b" containerName="kube-rbac-proxy-node" Dec 10 13:06:18 crc kubenswrapper[4921]: I1210 13:06:18.929982 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="50684108-04fc-405c-82be-d21d16cd650b" containerName="kube-rbac-proxy-node" Dec 10 13:06:18 crc kubenswrapper[4921]: I1210 13:06:18.930096 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="50684108-04fc-405c-82be-d21d16cd650b" containerName="ovn-acl-logging" Dec 10 13:06:18 crc kubenswrapper[4921]: I1210 13:06:18.930111 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="50684108-04fc-405c-82be-d21d16cd650b" containerName="ovnkube-controller" Dec 10 13:06:18 crc kubenswrapper[4921]: I1210 13:06:18.930121 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="50684108-04fc-405c-82be-d21d16cd650b" containerName="ovnkube-controller" Dec 10 13:06:18 crc kubenswrapper[4921]: I1210 13:06:18.930130 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="50684108-04fc-405c-82be-d21d16cd650b" containerName="nbdb" Dec 10 13:06:18 crc kubenswrapper[4921]: I1210 13:06:18.930144 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="50684108-04fc-405c-82be-d21d16cd650b" containerName="kube-rbac-proxy-ovn-metrics" Dec 10 13:06:18 crc kubenswrapper[4921]: I1210 13:06:18.930156 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="50684108-04fc-405c-82be-d21d16cd650b" containerName="ovn-controller" Dec 10 13:06:18 crc kubenswrapper[4921]: I1210 13:06:18.930166 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="50684108-04fc-405c-82be-d21d16cd650b" containerName="sbdb" Dec 10 13:06:18 crc kubenswrapper[4921]: I1210 13:06:18.930177 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="50684108-04fc-405c-82be-d21d16cd650b" containerName="kube-rbac-proxy-node" Dec 10 13:06:18 crc kubenswrapper[4921]: I1210 13:06:18.930188 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="50684108-04fc-405c-82be-d21d16cd650b" containerName="northd" Dec 10 13:06:18 crc kubenswrapper[4921]: I1210 13:06:18.930419 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="50684108-04fc-405c-82be-d21d16cd650b" containerName="ovnkube-controller" Dec 10 13:06:18 crc kubenswrapper[4921]: I1210 13:06:18.930432 4921 
memory_manager.go:354] "RemoveStaleState removing state" podUID="50684108-04fc-405c-82be-d21d16cd650b" containerName="ovnkube-controller" Dec 10 13:06:18 crc kubenswrapper[4921]: I1210 13:06:18.932339 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-5scwb" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.024848 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/50684108-04fc-405c-82be-d21d16cd650b-host-run-ovn-kubernetes\") pod \"50684108-04fc-405c-82be-d21d16cd650b\" (UID: \"50684108-04fc-405c-82be-d21d16cd650b\") " Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.024883 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/50684108-04fc-405c-82be-d21d16cd650b-host-kubelet\") pod \"50684108-04fc-405c-82be-d21d16cd650b\" (UID: \"50684108-04fc-405c-82be-d21d16cd650b\") " Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.024915 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/50684108-04fc-405c-82be-d21d16cd650b-run-openvswitch\") pod \"50684108-04fc-405c-82be-d21d16cd650b\" (UID: \"50684108-04fc-405c-82be-d21d16cd650b\") " Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.024970 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/50684108-04fc-405c-82be-d21d16cd650b-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "50684108-04fc-405c-82be-d21d16cd650b" (UID: "50684108-04fc-405c-82be-d21d16cd650b"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.024988 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/50684108-04fc-405c-82be-d21d16cd650b-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "50684108-04fc-405c-82be-d21d16cd650b" (UID: "50684108-04fc-405c-82be-d21d16cd650b"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.025012 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/50684108-04fc-405c-82be-d21d16cd650b-ovn-node-metrics-cert\") pod \"50684108-04fc-405c-82be-d21d16cd650b\" (UID: \"50684108-04fc-405c-82be-d21d16cd650b\") " Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.025091 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/50684108-04fc-405c-82be-d21d16cd650b-systemd-units\") pod \"50684108-04fc-405c-82be-d21d16cd650b\" (UID: \"50684108-04fc-405c-82be-d21d16cd650b\") " Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.025032 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/50684108-04fc-405c-82be-d21d16cd650b-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "50684108-04fc-405c-82be-d21d16cd650b" (UID: "50684108-04fc-405c-82be-d21d16cd650b"). InnerVolumeSpecName "run-openvswitch". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.025136 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/50684108-04fc-405c-82be-d21d16cd650b-host-cni-bin\") pod \"50684108-04fc-405c-82be-d21d16cd650b\" (UID: \"50684108-04fc-405c-82be-d21d16cd650b\") " Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.025156 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/50684108-04fc-405c-82be-d21d16cd650b-host-run-netns\") pod \"50684108-04fc-405c-82be-d21d16cd650b\" (UID: \"50684108-04fc-405c-82be-d21d16cd650b\") " Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.025176 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/50684108-04fc-405c-82be-d21d16cd650b-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "50684108-04fc-405c-82be-d21d16cd650b" (UID: "50684108-04fc-405c-82be-d21d16cd650b"). InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.025183 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9ddcs\" (UniqueName: \"kubernetes.io/projected/50684108-04fc-405c-82be-d21d16cd650b-kube-api-access-9ddcs\") pod \"50684108-04fc-405c-82be-d21d16cd650b\" (UID: \"50684108-04fc-405c-82be-d21d16cd650b\") " Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.026047 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/50684108-04fc-405c-82be-d21d16cd650b-run-ovn\") pod \"50684108-04fc-405c-82be-d21d16cd650b\" (UID: \"50684108-04fc-405c-82be-d21d16cd650b\") " Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.026081 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/50684108-04fc-405c-82be-d21d16cd650b-node-log\") pod \"50684108-04fc-405c-82be-d21d16cd650b\" (UID: \"50684108-04fc-405c-82be-d21d16cd650b\") " Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.025204 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/50684108-04fc-405c-82be-d21d16cd650b-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "50684108-04fc-405c-82be-d21d16cd650b" (UID: "50684108-04fc-405c-82be-d21d16cd650b"). InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.025204 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/50684108-04fc-405c-82be-d21d16cd650b-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "50684108-04fc-405c-82be-d21d16cd650b" (UID: "50684108-04fc-405c-82be-d21d16cd650b"). InnerVolumeSpecName "host-cni-bin". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.026106 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/50684108-04fc-405c-82be-d21d16cd650b-ovnkube-script-lib\") pod \"50684108-04fc-405c-82be-d21d16cd650b\" (UID: \"50684108-04fc-405c-82be-d21d16cd650b\") " Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.026136 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/50684108-04fc-405c-82be-d21d16cd650b-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "50684108-04fc-405c-82be-d21d16cd650b" (UID: "50684108-04fc-405c-82be-d21d16cd650b"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.026135 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/50684108-04fc-405c-82be-d21d16cd650b-node-log" (OuterVolumeSpecName: "node-log") pod "50684108-04fc-405c-82be-d21d16cd650b" (UID: "50684108-04fc-405c-82be-d21d16cd650b"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.026154 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/50684108-04fc-405c-82be-d21d16cd650b-env-overrides\") pod \"50684108-04fc-405c-82be-d21d16cd650b\" (UID: \"50684108-04fc-405c-82be-d21d16cd650b\") " Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.026186 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/50684108-04fc-405c-82be-d21d16cd650b-ovnkube-config\") pod \"50684108-04fc-405c-82be-d21d16cd650b\" (UID: \"50684108-04fc-405c-82be-d21d16cd650b\") " Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.026215 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/50684108-04fc-405c-82be-d21d16cd650b-log-socket\") pod \"50684108-04fc-405c-82be-d21d16cd650b\" (UID: \"50684108-04fc-405c-82be-d21d16cd650b\") " Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.026233 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/50684108-04fc-405c-82be-d21d16cd650b-host-slash\") pod \"50684108-04fc-405c-82be-d21d16cd650b\" (UID: \"50684108-04fc-405c-82be-d21d16cd650b\") " Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.026252 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/50684108-04fc-405c-82be-d21d16cd650b-host-cni-netd\") pod \"50684108-04fc-405c-82be-d21d16cd650b\" (UID: \"50684108-04fc-405c-82be-d21d16cd650b\") " Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.026270 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/50684108-04fc-405c-82be-d21d16cd650b-etc-openvswitch\") pod \"50684108-04fc-405c-82be-d21d16cd650b\" (UID: \"50684108-04fc-405c-82be-d21d16cd650b\") " Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.026288 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: 
\"kubernetes.io/host-path/50684108-04fc-405c-82be-d21d16cd650b-run-systemd\") pod \"50684108-04fc-405c-82be-d21d16cd650b\" (UID: \"50684108-04fc-405c-82be-d21d16cd650b\") " Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.026308 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/50684108-04fc-405c-82be-d21d16cd650b-host-var-lib-cni-networks-ovn-kubernetes\") pod \"50684108-04fc-405c-82be-d21d16cd650b\" (UID: \"50684108-04fc-405c-82be-d21d16cd650b\") " Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.026447 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/50684108-04fc-405c-82be-d21d16cd650b-var-lib-openvswitch\") pod \"50684108-04fc-405c-82be-d21d16cd650b\" (UID: \"50684108-04fc-405c-82be-d21d16cd650b\") " Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.026476 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/50684108-04fc-405c-82be-d21d16cd650b-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "50684108-04fc-405c-82be-d21d16cd650b" (UID: "50684108-04fc-405c-82be-d21d16cd650b"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.026505 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/50684108-04fc-405c-82be-d21d16cd650b-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "50684108-04fc-405c-82be-d21d16cd650b" (UID: "50684108-04fc-405c-82be-d21d16cd650b"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.026497 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/50684108-04fc-405c-82be-d21d16cd650b-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "50684108-04fc-405c-82be-d21d16cd650b" (UID: "50684108-04fc-405c-82be-d21d16cd650b"). InnerVolumeSpecName "host-cni-netd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.026576 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/50684108-04fc-405c-82be-d21d16cd650b-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "50684108-04fc-405c-82be-d21d16cd650b" (UID: "50684108-04fc-405c-82be-d21d16cd650b"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.026592 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/50684108-04fc-405c-82be-d21d16cd650b-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "50684108-04fc-405c-82be-d21d16cd650b" (UID: "50684108-04fc-405c-82be-d21d16cd650b"). InnerVolumeSpecName "ovnkube-script-lib". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.026636 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/50684108-04fc-405c-82be-d21d16cd650b-log-socket" (OuterVolumeSpecName: "log-socket") pod "50684108-04fc-405c-82be-d21d16cd650b" (UID: "50684108-04fc-405c-82be-d21d16cd650b"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.026648 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/50684108-04fc-405c-82be-d21d16cd650b-host-slash" (OuterVolumeSpecName: "host-slash") pod "50684108-04fc-405c-82be-d21d16cd650b" (UID: "50684108-04fc-405c-82be-d21d16cd650b"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.026668 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/50684108-04fc-405c-82be-d21d16cd650b-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "50684108-04fc-405c-82be-d21d16cd650b" (UID: "50684108-04fc-405c-82be-d21d16cd650b"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.026766 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/2502fe99-0289-472a-b984-93d8e037383d-var-lib-openvswitch\") pod \"ovnkube-node-5scwb\" (UID: \"2502fe99-0289-472a-b984-93d8e037383d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5scwb" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.026930 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/50684108-04fc-405c-82be-d21d16cd650b-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "50684108-04fc-405c-82be-d21d16cd650b" (UID: "50684108-04fc-405c-82be-d21d16cd650b"). InnerVolumeSpecName "ovnkube-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.026949 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/2502fe99-0289-472a-b984-93d8e037383d-host-run-ovn-kubernetes\") pod \"ovnkube-node-5scwb\" (UID: \"2502fe99-0289-472a-b984-93d8e037383d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5scwb" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.026997 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/2502fe99-0289-472a-b984-93d8e037383d-run-openvswitch\") pod \"ovnkube-node-5scwb\" (UID: \"2502fe99-0289-472a-b984-93d8e037383d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5scwb" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.027071 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/2502fe99-0289-472a-b984-93d8e037383d-node-log\") pod \"ovnkube-node-5scwb\" (UID: \"2502fe99-0289-472a-b984-93d8e037383d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5scwb" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.027134 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/2502fe99-0289-472a-b984-93d8e037383d-systemd-units\") pod \"ovnkube-node-5scwb\" (UID: \"2502fe99-0289-472a-b984-93d8e037383d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5scwb" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.027159 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/2502fe99-0289-472a-b984-93d8e037383d-etc-openvswitch\") pod \"ovnkube-node-5scwb\" (UID: \"2502fe99-0289-472a-b984-93d8e037383d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5scwb" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.027218 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/2502fe99-0289-472a-b984-93d8e037383d-host-kubelet\") pod \"ovnkube-node-5scwb\" (UID: \"2502fe99-0289-472a-b984-93d8e037383d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5scwb" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.027251 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/2502fe99-0289-472a-b984-93d8e037383d-log-socket\") pod \"ovnkube-node-5scwb\" (UID: \"2502fe99-0289-472a-b984-93d8e037383d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5scwb" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.027272 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/2502fe99-0289-472a-b984-93d8e037383d-host-slash\") pod \"ovnkube-node-5scwb\" (UID: \"2502fe99-0289-472a-b984-93d8e037383d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5scwb" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.027287 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/2502fe99-0289-472a-b984-93d8e037383d-run-systemd\") pod 
\"ovnkube-node-5scwb\" (UID: \"2502fe99-0289-472a-b984-93d8e037383d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5scwb" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.027301 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/2502fe99-0289-472a-b984-93d8e037383d-host-cni-bin\") pod \"ovnkube-node-5scwb\" (UID: \"2502fe99-0289-472a-b984-93d8e037383d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5scwb" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.027322 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/2502fe99-0289-472a-b984-93d8e037383d-host-run-netns\") pod \"ovnkube-node-5scwb\" (UID: \"2502fe99-0289-472a-b984-93d8e037383d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5scwb" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.027337 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/2502fe99-0289-472a-b984-93d8e037383d-run-ovn\") pod \"ovnkube-node-5scwb\" (UID: \"2502fe99-0289-472a-b984-93d8e037383d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5scwb" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.027452 4921 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/50684108-04fc-405c-82be-d21d16cd650b-node-log\") on node \"crc\" DevicePath \"\"" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.027473 4921 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/50684108-04fc-405c-82be-d21d16cd650b-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.027503 4921 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/50684108-04fc-405c-82be-d21d16cd650b-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.027539 4921 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/50684108-04fc-405c-82be-d21d16cd650b-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.027549 4921 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/50684108-04fc-405c-82be-d21d16cd650b-log-socket\") on node \"crc\" DevicePath \"\"" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.027558 4921 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/50684108-04fc-405c-82be-d21d16cd650b-host-slash\") on node \"crc\" DevicePath \"\"" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.027567 4921 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/50684108-04fc-405c-82be-d21d16cd650b-host-cni-netd\") on node \"crc\" DevicePath \"\"" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.027578 4921 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/50684108-04fc-405c-82be-d21d16cd650b-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.027589 4921 reconciler_common.go:293] "Volume detached for 
volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/50684108-04fc-405c-82be-d21d16cd650b-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.027600 4921 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/50684108-04fc-405c-82be-d21d16cd650b-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.027611 4921 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/50684108-04fc-405c-82be-d21d16cd650b-host-kubelet\") on node \"crc\" DevicePath \"\"" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.027621 4921 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/50684108-04fc-405c-82be-d21d16cd650b-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.027633 4921 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/50684108-04fc-405c-82be-d21d16cd650b-run-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.027642 4921 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/50684108-04fc-405c-82be-d21d16cd650b-systemd-units\") on node \"crc\" DevicePath \"\"" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.027651 4921 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/50684108-04fc-405c-82be-d21d16cd650b-host-cni-bin\") on node \"crc\" DevicePath \"\"" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.027659 4921 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/50684108-04fc-405c-82be-d21d16cd650b-host-run-netns\") on node \"crc\" DevicePath \"\"" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.027668 4921 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/50684108-04fc-405c-82be-d21d16cd650b-run-ovn\") on node \"crc\" DevicePath \"\"" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.030158 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/50684108-04fc-405c-82be-d21d16cd650b-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "50684108-04fc-405c-82be-d21d16cd650b" (UID: "50684108-04fc-405c-82be-d21d16cd650b"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.030741 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/50684108-04fc-405c-82be-d21d16cd650b-kube-api-access-9ddcs" (OuterVolumeSpecName: "kube-api-access-9ddcs") pod "50684108-04fc-405c-82be-d21d16cd650b" (UID: "50684108-04fc-405c-82be-d21d16cd650b"). InnerVolumeSpecName "kube-api-access-9ddcs". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.040550 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/50684108-04fc-405c-82be-d21d16cd650b-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "50684108-04fc-405c-82be-d21d16cd650b" (UID: "50684108-04fc-405c-82be-d21d16cd650b"). InnerVolumeSpecName "run-systemd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.128190 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/2502fe99-0289-472a-b984-93d8e037383d-systemd-units\") pod \"ovnkube-node-5scwb\" (UID: \"2502fe99-0289-472a-b984-93d8e037383d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5scwb" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.128236 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/2502fe99-0289-472a-b984-93d8e037383d-ovnkube-config\") pod \"ovnkube-node-5scwb\" (UID: \"2502fe99-0289-472a-b984-93d8e037383d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5scwb" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.128264 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/2502fe99-0289-472a-b984-93d8e037383d-etc-openvswitch\") pod \"ovnkube-node-5scwb\" (UID: \"2502fe99-0289-472a-b984-93d8e037383d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5scwb" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.128306 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/2502fe99-0289-472a-b984-93d8e037383d-host-cni-netd\") pod \"ovnkube-node-5scwb\" (UID: \"2502fe99-0289-472a-b984-93d8e037383d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5scwb" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.128327 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/2502fe99-0289-472a-b984-93d8e037383d-host-kubelet\") pod \"ovnkube-node-5scwb\" (UID: \"2502fe99-0289-472a-b984-93d8e037383d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5scwb" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.128358 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6w5rk\" (UniqueName: \"kubernetes.io/projected/2502fe99-0289-472a-b984-93d8e037383d-kube-api-access-6w5rk\") pod \"ovnkube-node-5scwb\" (UID: \"2502fe99-0289-472a-b984-93d8e037383d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5scwb" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.128382 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/2502fe99-0289-472a-b984-93d8e037383d-log-socket\") pod \"ovnkube-node-5scwb\" (UID: \"2502fe99-0289-472a-b984-93d8e037383d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5scwb" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.128445 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/2502fe99-0289-472a-b984-93d8e037383d-host-slash\") pod \"ovnkube-node-5scwb\" (UID: 
\"2502fe99-0289-472a-b984-93d8e037383d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5scwb" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.128468 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/2502fe99-0289-472a-b984-93d8e037383d-env-overrides\") pod \"ovnkube-node-5scwb\" (UID: \"2502fe99-0289-472a-b984-93d8e037383d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5scwb" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.128502 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/2502fe99-0289-472a-b984-93d8e037383d-run-systemd\") pod \"ovnkube-node-5scwb\" (UID: \"2502fe99-0289-472a-b984-93d8e037383d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5scwb" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.128523 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/2502fe99-0289-472a-b984-93d8e037383d-host-cni-bin\") pod \"ovnkube-node-5scwb\" (UID: \"2502fe99-0289-472a-b984-93d8e037383d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5scwb" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.128549 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/2502fe99-0289-472a-b984-93d8e037383d-host-run-netns\") pod \"ovnkube-node-5scwb\" (UID: \"2502fe99-0289-472a-b984-93d8e037383d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5scwb" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.128580 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/2502fe99-0289-472a-b984-93d8e037383d-run-ovn\") pod \"ovnkube-node-5scwb\" (UID: \"2502fe99-0289-472a-b984-93d8e037383d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5scwb" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.128609 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/2502fe99-0289-472a-b984-93d8e037383d-ovn-node-metrics-cert\") pod \"ovnkube-node-5scwb\" (UID: \"2502fe99-0289-472a-b984-93d8e037383d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5scwb" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.128613 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/2502fe99-0289-472a-b984-93d8e037383d-systemd-units\") pod \"ovnkube-node-5scwb\" (UID: \"2502fe99-0289-472a-b984-93d8e037383d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5scwb" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.128646 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/2502fe99-0289-472a-b984-93d8e037383d-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-5scwb\" (UID: \"2502fe99-0289-472a-b984-93d8e037383d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5scwb" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.128673 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/2502fe99-0289-472a-b984-93d8e037383d-host-kubelet\") pod \"ovnkube-node-5scwb\" (UID: 
\"2502fe99-0289-472a-b984-93d8e037383d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5scwb" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.128687 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/2502fe99-0289-472a-b984-93d8e037383d-var-lib-openvswitch\") pod \"ovnkube-node-5scwb\" (UID: \"2502fe99-0289-472a-b984-93d8e037383d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5scwb" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.128710 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/2502fe99-0289-472a-b984-93d8e037383d-log-socket\") pod \"ovnkube-node-5scwb\" (UID: \"2502fe99-0289-472a-b984-93d8e037383d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5scwb" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.128728 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/2502fe99-0289-472a-b984-93d8e037383d-host-run-ovn-kubernetes\") pod \"ovnkube-node-5scwb\" (UID: \"2502fe99-0289-472a-b984-93d8e037383d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5scwb" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.128751 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/2502fe99-0289-472a-b984-93d8e037383d-run-systemd\") pod \"ovnkube-node-5scwb\" (UID: \"2502fe99-0289-472a-b984-93d8e037383d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5scwb" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.128652 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/2502fe99-0289-472a-b984-93d8e037383d-etc-openvswitch\") pod \"ovnkube-node-5scwb\" (UID: \"2502fe99-0289-472a-b984-93d8e037383d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5scwb" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.128757 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/2502fe99-0289-472a-b984-93d8e037383d-ovnkube-script-lib\") pod \"ovnkube-node-5scwb\" (UID: \"2502fe99-0289-472a-b984-93d8e037383d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5scwb" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.128779 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/2502fe99-0289-472a-b984-93d8e037383d-host-run-netns\") pod \"ovnkube-node-5scwb\" (UID: \"2502fe99-0289-472a-b984-93d8e037383d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5scwb" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.128793 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/2502fe99-0289-472a-b984-93d8e037383d-run-openvswitch\") pod \"ovnkube-node-5scwb\" (UID: \"2502fe99-0289-472a-b984-93d8e037383d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5scwb" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.128798 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/2502fe99-0289-472a-b984-93d8e037383d-run-ovn\") pod \"ovnkube-node-5scwb\" (UID: \"2502fe99-0289-472a-b984-93d8e037383d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5scwb" Dec 10 
13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.128731 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/2502fe99-0289-472a-b984-93d8e037383d-host-slash\") pod \"ovnkube-node-5scwb\" (UID: \"2502fe99-0289-472a-b984-93d8e037383d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5scwb" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.128821 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/2502fe99-0289-472a-b984-93d8e037383d-var-lib-openvswitch\") pod \"ovnkube-node-5scwb\" (UID: \"2502fe99-0289-472a-b984-93d8e037383d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5scwb" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.128696 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/2502fe99-0289-472a-b984-93d8e037383d-host-cni-bin\") pod \"ovnkube-node-5scwb\" (UID: \"2502fe99-0289-472a-b984-93d8e037383d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5scwb" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.128847 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/2502fe99-0289-472a-b984-93d8e037383d-run-openvswitch\") pod \"ovnkube-node-5scwb\" (UID: \"2502fe99-0289-472a-b984-93d8e037383d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5scwb" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.128871 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/2502fe99-0289-472a-b984-93d8e037383d-host-run-ovn-kubernetes\") pod \"ovnkube-node-5scwb\" (UID: \"2502fe99-0289-472a-b984-93d8e037383d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5scwb" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.128933 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/2502fe99-0289-472a-b984-93d8e037383d-node-log\") pod \"ovnkube-node-5scwb\" (UID: \"2502fe99-0289-472a-b984-93d8e037383d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5scwb" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.128979 4921 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/50684108-04fc-405c-82be-d21d16cd650b-run-systemd\") on node \"crc\" DevicePath \"\"" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.128995 4921 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/50684108-04fc-405c-82be-d21d16cd650b-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.129008 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9ddcs\" (UniqueName: \"kubernetes.io/projected/50684108-04fc-405c-82be-d21d16cd650b-kube-api-access-9ddcs\") on node \"crc\" DevicePath \"\"" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.129035 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/2502fe99-0289-472a-b984-93d8e037383d-node-log\") pod \"ovnkube-node-5scwb\" (UID: \"2502fe99-0289-472a-b984-93d8e037383d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5scwb" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.231293 4921 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/2502fe99-0289-472a-b984-93d8e037383d-env-overrides\") pod \"ovnkube-node-5scwb\" (UID: \"2502fe99-0289-472a-b984-93d8e037383d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5scwb" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.231344 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/2502fe99-0289-472a-b984-93d8e037383d-ovn-node-metrics-cert\") pod \"ovnkube-node-5scwb\" (UID: \"2502fe99-0289-472a-b984-93d8e037383d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5scwb" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.231372 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/2502fe99-0289-472a-b984-93d8e037383d-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-5scwb\" (UID: \"2502fe99-0289-472a-b984-93d8e037383d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5scwb" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.231430 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/2502fe99-0289-472a-b984-93d8e037383d-ovnkube-script-lib\") pod \"ovnkube-node-5scwb\" (UID: \"2502fe99-0289-472a-b984-93d8e037383d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5scwb" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.231482 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/2502fe99-0289-472a-b984-93d8e037383d-ovnkube-config\") pod \"ovnkube-node-5scwb\" (UID: \"2502fe99-0289-472a-b984-93d8e037383d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5scwb" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.231511 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/2502fe99-0289-472a-b984-93d8e037383d-host-cni-netd\") pod \"ovnkube-node-5scwb\" (UID: \"2502fe99-0289-472a-b984-93d8e037383d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5scwb" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.231531 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6w5rk\" (UniqueName: \"kubernetes.io/projected/2502fe99-0289-472a-b984-93d8e037383d-kube-api-access-6w5rk\") pod \"ovnkube-node-5scwb\" (UID: \"2502fe99-0289-472a-b984-93d8e037383d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5scwb" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.232380 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/2502fe99-0289-472a-b984-93d8e037383d-env-overrides\") pod \"ovnkube-node-5scwb\" (UID: \"2502fe99-0289-472a-b984-93d8e037383d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5scwb" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.233130 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/2502fe99-0289-472a-b984-93d8e037383d-ovnkube-script-lib\") pod \"ovnkube-node-5scwb\" (UID: \"2502fe99-0289-472a-b984-93d8e037383d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5scwb" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.233181 4921 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/2502fe99-0289-472a-b984-93d8e037383d-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-5scwb\" (UID: \"2502fe99-0289-472a-b984-93d8e037383d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5scwb" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.233210 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/2502fe99-0289-472a-b984-93d8e037383d-host-cni-netd\") pod \"ovnkube-node-5scwb\" (UID: \"2502fe99-0289-472a-b984-93d8e037383d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5scwb" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.233230 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/2502fe99-0289-472a-b984-93d8e037383d-ovnkube-config\") pod \"ovnkube-node-5scwb\" (UID: \"2502fe99-0289-472a-b984-93d8e037383d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5scwb" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.236345 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/2502fe99-0289-472a-b984-93d8e037383d-ovn-node-metrics-cert\") pod \"ovnkube-node-5scwb\" (UID: \"2502fe99-0289-472a-b984-93d8e037383d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5scwb" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.250590 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6w5rk\" (UniqueName: \"kubernetes.io/projected/2502fe99-0289-472a-b984-93d8e037383d-kube-api-access-6w5rk\") pod \"ovnkube-node-5scwb\" (UID: \"2502fe99-0289-472a-b984-93d8e037383d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5scwb" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.253780 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-pqlx4_78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e/kube-multus/1.log" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.254913 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-pqlx4_78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e/kube-multus/0.log" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.254951 4921 generic.go:334] "Generic (PLEG): container finished" podID="78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e" containerID="78a25a5392eaeaa8d5c01232765dce78525cb15225f6d54f198f5ec652979da4" exitCode=2 Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.255040 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-pqlx4" event={"ID":"78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e","Type":"ContainerDied","Data":"78a25a5392eaeaa8d5c01232765dce78525cb15225f6d54f198f5ec652979da4"} Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.255186 4921 scope.go:117] "RemoveContainer" containerID="480da3b2621712c4562f9423dc98fdbf17a9dc45365f129777611bc7e934c709" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.256011 4921 scope.go:117] "RemoveContainer" containerID="78a25a5392eaeaa8d5c01232765dce78525cb15225f6d54f198f5ec652979da4" Dec 10 13:06:19 crc kubenswrapper[4921]: E1210 13:06:19.256206 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-pqlx4_openshift-multus(78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e)\"" pod="openshift-multus/multus-pqlx4" 
podUID="78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.259828 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-m7n89_50684108-04fc-405c-82be-d21d16cd650b/ovnkube-controller/2.log" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.262045 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-m7n89_50684108-04fc-405c-82be-d21d16cd650b/ovn-acl-logging/0.log" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.262749 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-m7n89_50684108-04fc-405c-82be-d21d16cd650b/ovn-controller/0.log" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.263085 4921 generic.go:334] "Generic (PLEG): container finished" podID="50684108-04fc-405c-82be-d21d16cd650b" containerID="45f1cf4017292b2abcad5c57a340cd9c9b14a2bbfeeba7c10e3d0b3bcc0dca5a" exitCode=0 Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.263108 4921 generic.go:334] "Generic (PLEG): container finished" podID="50684108-04fc-405c-82be-d21d16cd650b" containerID="dd0025f5be6e68aba73c349dd732281dead920b7d8c2d307b4a67cfdafb99119" exitCode=0 Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.263115 4921 generic.go:334] "Generic (PLEG): container finished" podID="50684108-04fc-405c-82be-d21d16cd650b" containerID="933c0c81aa0aa2d676a6e404f883a7c81240ef7b07a2e794878c85994d0eb88f" exitCode=0 Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.263124 4921 generic.go:334] "Generic (PLEG): container finished" podID="50684108-04fc-405c-82be-d21d16cd650b" containerID="f8f888f214898cb28563da7a77267781622df1f2231c27d1fbdee617ada1ec2e" exitCode=0 Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.263131 4921 generic.go:334] "Generic (PLEG): container finished" podID="50684108-04fc-405c-82be-d21d16cd650b" containerID="1a88b1b9101bc4ab339d394df337e4e11ec8af98b44b621bcb84eed1a0fba3d7" exitCode=0 Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.263138 4921 generic.go:334] "Generic (PLEG): container finished" podID="50684108-04fc-405c-82be-d21d16cd650b" containerID="8fd269a96475df9dccf2f7bd0ffae831f397f49232f5c22df67903b9b8b8161e" exitCode=0 Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.263145 4921 generic.go:334] "Generic (PLEG): container finished" podID="50684108-04fc-405c-82be-d21d16cd650b" containerID="27956424405bdf6223a96b8fd91b5152276a1501c3de2e07dfafc8b3329a6063" exitCode=143 Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.263152 4921 generic.go:334] "Generic (PLEG): container finished" podID="50684108-04fc-405c-82be-d21d16cd650b" containerID="59a0f3962237d723e5aa9044de1ddce3673ae1fb4c9e5e0478cd41daa661f6d7" exitCode=143 Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.263169 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" event={"ID":"50684108-04fc-405c-82be-d21d16cd650b","Type":"ContainerDied","Data":"45f1cf4017292b2abcad5c57a340cd9c9b14a2bbfeeba7c10e3d0b3bcc0dca5a"} Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.263192 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" event={"ID":"50684108-04fc-405c-82be-d21d16cd650b","Type":"ContainerDied","Data":"dd0025f5be6e68aba73c349dd732281dead920b7d8c2d307b4a67cfdafb99119"} Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.263203 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" event={"ID":"50684108-04fc-405c-82be-d21d16cd650b","Type":"ContainerDied","Data":"933c0c81aa0aa2d676a6e404f883a7c81240ef7b07a2e794878c85994d0eb88f"} Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.263212 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" event={"ID":"50684108-04fc-405c-82be-d21d16cd650b","Type":"ContainerDied","Data":"f8f888f214898cb28563da7a77267781622df1f2231c27d1fbdee617ada1ec2e"} Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.263228 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" event={"ID":"50684108-04fc-405c-82be-d21d16cd650b","Type":"ContainerDied","Data":"1a88b1b9101bc4ab339d394df337e4e11ec8af98b44b621bcb84eed1a0fba3d7"} Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.263237 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" event={"ID":"50684108-04fc-405c-82be-d21d16cd650b","Type":"ContainerDied","Data":"8fd269a96475df9dccf2f7bd0ffae831f397f49232f5c22df67903b9b8b8161e"} Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.263246 4921 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"45f1cf4017292b2abcad5c57a340cd9c9b14a2bbfeeba7c10e3d0b3bcc0dca5a"} Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.263255 4921 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"551dc5f4c39c06d2143805320061efc95d84e870eeecf23b3a64d829653810ed"} Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.263261 4921 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"dd0025f5be6e68aba73c349dd732281dead920b7d8c2d307b4a67cfdafb99119"} Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.263267 4921 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"933c0c81aa0aa2d676a6e404f883a7c81240ef7b07a2e794878c85994d0eb88f"} Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.263272 4921 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f8f888f214898cb28563da7a77267781622df1f2231c27d1fbdee617ada1ec2e"} Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.263277 4921 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"1a88b1b9101bc4ab339d394df337e4e11ec8af98b44b621bcb84eed1a0fba3d7"} Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.263281 4921 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"8fd269a96475df9dccf2f7bd0ffae831f397f49232f5c22df67903b9b8b8161e"} Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.263286 4921 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"27956424405bdf6223a96b8fd91b5152276a1501c3de2e07dfafc8b3329a6063"} Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.263291 4921 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"59a0f3962237d723e5aa9044de1ddce3673ae1fb4c9e5e0478cd41daa661f6d7"} Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.263296 4921 pod_container_deletor.go:114] "Failed to issue the request to remove container" 
containerID={"Type":"cri-o","ID":"34932b230bb26e6c4b1bdf433827ce608df8658f6fb76140a4f0ac680dc1d43f"} Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.263302 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" event={"ID":"50684108-04fc-405c-82be-d21d16cd650b","Type":"ContainerDied","Data":"27956424405bdf6223a96b8fd91b5152276a1501c3de2e07dfafc8b3329a6063"} Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.263309 4921 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"45f1cf4017292b2abcad5c57a340cd9c9b14a2bbfeeba7c10e3d0b3bcc0dca5a"} Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.263314 4921 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"551dc5f4c39c06d2143805320061efc95d84e870eeecf23b3a64d829653810ed"} Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.263319 4921 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"dd0025f5be6e68aba73c349dd732281dead920b7d8c2d307b4a67cfdafb99119"} Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.263324 4921 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"933c0c81aa0aa2d676a6e404f883a7c81240ef7b07a2e794878c85994d0eb88f"} Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.263328 4921 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f8f888f214898cb28563da7a77267781622df1f2231c27d1fbdee617ada1ec2e"} Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.263333 4921 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"1a88b1b9101bc4ab339d394df337e4e11ec8af98b44b621bcb84eed1a0fba3d7"} Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.263339 4921 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"8fd269a96475df9dccf2f7bd0ffae831f397f49232f5c22df67903b9b8b8161e"} Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.263344 4921 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"27956424405bdf6223a96b8fd91b5152276a1501c3de2e07dfafc8b3329a6063"} Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.263348 4921 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"59a0f3962237d723e5aa9044de1ddce3673ae1fb4c9e5e0478cd41daa661f6d7"} Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.263353 4921 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"34932b230bb26e6c4b1bdf433827ce608df8658f6fb76140a4f0ac680dc1d43f"} Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.263360 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" event={"ID":"50684108-04fc-405c-82be-d21d16cd650b","Type":"ContainerDied","Data":"59a0f3962237d723e5aa9044de1ddce3673ae1fb4c9e5e0478cd41daa661f6d7"} Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.263367 4921 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"45f1cf4017292b2abcad5c57a340cd9c9b14a2bbfeeba7c10e3d0b3bcc0dca5a"} Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.263373 4921 
pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"551dc5f4c39c06d2143805320061efc95d84e870eeecf23b3a64d829653810ed"} Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.263378 4921 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"dd0025f5be6e68aba73c349dd732281dead920b7d8c2d307b4a67cfdafb99119"} Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.263406 4921 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"933c0c81aa0aa2d676a6e404f883a7c81240ef7b07a2e794878c85994d0eb88f"} Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.263419 4921 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f8f888f214898cb28563da7a77267781622df1f2231c27d1fbdee617ada1ec2e"} Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.263425 4921 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"1a88b1b9101bc4ab339d394df337e4e11ec8af98b44b621bcb84eed1a0fba3d7"} Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.263431 4921 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"8fd269a96475df9dccf2f7bd0ffae831f397f49232f5c22df67903b9b8b8161e"} Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.263437 4921 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"27956424405bdf6223a96b8fd91b5152276a1501c3de2e07dfafc8b3329a6063"} Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.263442 4921 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"59a0f3962237d723e5aa9044de1ddce3673ae1fb4c9e5e0478cd41daa661f6d7"} Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.263446 4921 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"34932b230bb26e6c4b1bdf433827ce608df8658f6fb76140a4f0ac680dc1d43f"} Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.263455 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" event={"ID":"50684108-04fc-405c-82be-d21d16cd650b","Type":"ContainerDied","Data":"0c86cc1337c544dcdb33d7cf0733f5db47dddbe6b25adcaaf3f31fa873f9a175"} Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.263465 4921 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"45f1cf4017292b2abcad5c57a340cd9c9b14a2bbfeeba7c10e3d0b3bcc0dca5a"} Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.263470 4921 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"551dc5f4c39c06d2143805320061efc95d84e870eeecf23b3a64d829653810ed"} Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.263476 4921 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"dd0025f5be6e68aba73c349dd732281dead920b7d8c2d307b4a67cfdafb99119"} Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.263482 4921 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"933c0c81aa0aa2d676a6e404f883a7c81240ef7b07a2e794878c85994d0eb88f"} Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.263489 4921 
pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f8f888f214898cb28563da7a77267781622df1f2231c27d1fbdee617ada1ec2e"} Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.263503 4921 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"1a88b1b9101bc4ab339d394df337e4e11ec8af98b44b621bcb84eed1a0fba3d7"} Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.263510 4921 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"8fd269a96475df9dccf2f7bd0ffae831f397f49232f5c22df67903b9b8b8161e"} Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.263516 4921 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"27956424405bdf6223a96b8fd91b5152276a1501c3de2e07dfafc8b3329a6063"} Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.263521 4921 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"59a0f3962237d723e5aa9044de1ddce3673ae1fb4c9e5e0478cd41daa661f6d7"} Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.263526 4921 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"34932b230bb26e6c4b1bdf433827ce608df8658f6fb76140a4f0ac680dc1d43f"} Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.263629 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-m7n89" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.281902 4921 scope.go:117] "RemoveContainer" containerID="45f1cf4017292b2abcad5c57a340cd9c9b14a2bbfeeba7c10e3d0b3bcc0dca5a" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.297913 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-m7n89"] Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.302698 4921 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-m7n89"] Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.309052 4921 scope.go:117] "RemoveContainer" containerID="551dc5f4c39c06d2143805320061efc95d84e870eeecf23b3a64d829653810ed" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.324896 4921 scope.go:117] "RemoveContainer" containerID="dd0025f5be6e68aba73c349dd732281dead920b7d8c2d307b4a67cfdafb99119" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.338003 4921 scope.go:117] "RemoveContainer" containerID="933c0c81aa0aa2d676a6e404f883a7c81240ef7b07a2e794878c85994d0eb88f" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.352603 4921 scope.go:117] "RemoveContainer" containerID="f8f888f214898cb28563da7a77267781622df1f2231c27d1fbdee617ada1ec2e" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.368054 4921 scope.go:117] "RemoveContainer" containerID="1a88b1b9101bc4ab339d394df337e4e11ec8af98b44b621bcb84eed1a0fba3d7" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.384555 4921 scope.go:117] "RemoveContainer" containerID="8fd269a96475df9dccf2f7bd0ffae831f397f49232f5c22df67903b9b8b8161e" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.396794 4921 scope.go:117] "RemoveContainer" containerID="27956424405bdf6223a96b8fd91b5152276a1501c3de2e07dfafc8b3329a6063" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.411263 4921 scope.go:117] "RemoveContainer" 
containerID="59a0f3962237d723e5aa9044de1ddce3673ae1fb4c9e5e0478cd41daa661f6d7" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.424282 4921 scope.go:117] "RemoveContainer" containerID="34932b230bb26e6c4b1bdf433827ce608df8658f6fb76140a4f0ac680dc1d43f" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.437180 4921 scope.go:117] "RemoveContainer" containerID="45f1cf4017292b2abcad5c57a340cd9c9b14a2bbfeeba7c10e3d0b3bcc0dca5a" Dec 10 13:06:19 crc kubenswrapper[4921]: E1210 13:06:19.437616 4921 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"45f1cf4017292b2abcad5c57a340cd9c9b14a2bbfeeba7c10e3d0b3bcc0dca5a\": container with ID starting with 45f1cf4017292b2abcad5c57a340cd9c9b14a2bbfeeba7c10e3d0b3bcc0dca5a not found: ID does not exist" containerID="45f1cf4017292b2abcad5c57a340cd9c9b14a2bbfeeba7c10e3d0b3bcc0dca5a" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.437668 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"45f1cf4017292b2abcad5c57a340cd9c9b14a2bbfeeba7c10e3d0b3bcc0dca5a"} err="failed to get container status \"45f1cf4017292b2abcad5c57a340cd9c9b14a2bbfeeba7c10e3d0b3bcc0dca5a\": rpc error: code = NotFound desc = could not find container \"45f1cf4017292b2abcad5c57a340cd9c9b14a2bbfeeba7c10e3d0b3bcc0dca5a\": container with ID starting with 45f1cf4017292b2abcad5c57a340cd9c9b14a2bbfeeba7c10e3d0b3bcc0dca5a not found: ID does not exist" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.437692 4921 scope.go:117] "RemoveContainer" containerID="551dc5f4c39c06d2143805320061efc95d84e870eeecf23b3a64d829653810ed" Dec 10 13:06:19 crc kubenswrapper[4921]: E1210 13:06:19.437958 4921 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"551dc5f4c39c06d2143805320061efc95d84e870eeecf23b3a64d829653810ed\": container with ID starting with 551dc5f4c39c06d2143805320061efc95d84e870eeecf23b3a64d829653810ed not found: ID does not exist" containerID="551dc5f4c39c06d2143805320061efc95d84e870eeecf23b3a64d829653810ed" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.438061 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"551dc5f4c39c06d2143805320061efc95d84e870eeecf23b3a64d829653810ed"} err="failed to get container status \"551dc5f4c39c06d2143805320061efc95d84e870eeecf23b3a64d829653810ed\": rpc error: code = NotFound desc = could not find container \"551dc5f4c39c06d2143805320061efc95d84e870eeecf23b3a64d829653810ed\": container with ID starting with 551dc5f4c39c06d2143805320061efc95d84e870eeecf23b3a64d829653810ed not found: ID does not exist" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.438187 4921 scope.go:117] "RemoveContainer" containerID="dd0025f5be6e68aba73c349dd732281dead920b7d8c2d307b4a67cfdafb99119" Dec 10 13:06:19 crc kubenswrapper[4921]: E1210 13:06:19.438482 4921 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dd0025f5be6e68aba73c349dd732281dead920b7d8c2d307b4a67cfdafb99119\": container with ID starting with dd0025f5be6e68aba73c349dd732281dead920b7d8c2d307b4a67cfdafb99119 not found: ID does not exist" containerID="dd0025f5be6e68aba73c349dd732281dead920b7d8c2d307b4a67cfdafb99119" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.438527 4921 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"dd0025f5be6e68aba73c349dd732281dead920b7d8c2d307b4a67cfdafb99119"} err="failed to get container status \"dd0025f5be6e68aba73c349dd732281dead920b7d8c2d307b4a67cfdafb99119\": rpc error: code = NotFound desc = could not find container \"dd0025f5be6e68aba73c349dd732281dead920b7d8c2d307b4a67cfdafb99119\": container with ID starting with dd0025f5be6e68aba73c349dd732281dead920b7d8c2d307b4a67cfdafb99119 not found: ID does not exist" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.438545 4921 scope.go:117] "RemoveContainer" containerID="933c0c81aa0aa2d676a6e404f883a7c81240ef7b07a2e794878c85994d0eb88f" Dec 10 13:06:19 crc kubenswrapper[4921]: E1210 13:06:19.438797 4921 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"933c0c81aa0aa2d676a6e404f883a7c81240ef7b07a2e794878c85994d0eb88f\": container with ID starting with 933c0c81aa0aa2d676a6e404f883a7c81240ef7b07a2e794878c85994d0eb88f not found: ID does not exist" containerID="933c0c81aa0aa2d676a6e404f883a7c81240ef7b07a2e794878c85994d0eb88f" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.438819 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"933c0c81aa0aa2d676a6e404f883a7c81240ef7b07a2e794878c85994d0eb88f"} err="failed to get container status \"933c0c81aa0aa2d676a6e404f883a7c81240ef7b07a2e794878c85994d0eb88f\": rpc error: code = NotFound desc = could not find container \"933c0c81aa0aa2d676a6e404f883a7c81240ef7b07a2e794878c85994d0eb88f\": container with ID starting with 933c0c81aa0aa2d676a6e404f883a7c81240ef7b07a2e794878c85994d0eb88f not found: ID does not exist" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.438832 4921 scope.go:117] "RemoveContainer" containerID="f8f888f214898cb28563da7a77267781622df1f2231c27d1fbdee617ada1ec2e" Dec 10 13:06:19 crc kubenswrapper[4921]: E1210 13:06:19.439280 4921 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f8f888f214898cb28563da7a77267781622df1f2231c27d1fbdee617ada1ec2e\": container with ID starting with f8f888f214898cb28563da7a77267781622df1f2231c27d1fbdee617ada1ec2e not found: ID does not exist" containerID="f8f888f214898cb28563da7a77267781622df1f2231c27d1fbdee617ada1ec2e" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.439367 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f8f888f214898cb28563da7a77267781622df1f2231c27d1fbdee617ada1ec2e"} err="failed to get container status \"f8f888f214898cb28563da7a77267781622df1f2231c27d1fbdee617ada1ec2e\": rpc error: code = NotFound desc = could not find container \"f8f888f214898cb28563da7a77267781622df1f2231c27d1fbdee617ada1ec2e\": container with ID starting with f8f888f214898cb28563da7a77267781622df1f2231c27d1fbdee617ada1ec2e not found: ID does not exist" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.439478 4921 scope.go:117] "RemoveContainer" containerID="1a88b1b9101bc4ab339d394df337e4e11ec8af98b44b621bcb84eed1a0fba3d7" Dec 10 13:06:19 crc kubenswrapper[4921]: E1210 13:06:19.439754 4921 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1a88b1b9101bc4ab339d394df337e4e11ec8af98b44b621bcb84eed1a0fba3d7\": container with ID starting with 1a88b1b9101bc4ab339d394df337e4e11ec8af98b44b621bcb84eed1a0fba3d7 not found: ID does not exist" 
containerID="1a88b1b9101bc4ab339d394df337e4e11ec8af98b44b621bcb84eed1a0fba3d7" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.439799 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1a88b1b9101bc4ab339d394df337e4e11ec8af98b44b621bcb84eed1a0fba3d7"} err="failed to get container status \"1a88b1b9101bc4ab339d394df337e4e11ec8af98b44b621bcb84eed1a0fba3d7\": rpc error: code = NotFound desc = could not find container \"1a88b1b9101bc4ab339d394df337e4e11ec8af98b44b621bcb84eed1a0fba3d7\": container with ID starting with 1a88b1b9101bc4ab339d394df337e4e11ec8af98b44b621bcb84eed1a0fba3d7 not found: ID does not exist" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.439818 4921 scope.go:117] "RemoveContainer" containerID="8fd269a96475df9dccf2f7bd0ffae831f397f49232f5c22df67903b9b8b8161e" Dec 10 13:06:19 crc kubenswrapper[4921]: E1210 13:06:19.440085 4921 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8fd269a96475df9dccf2f7bd0ffae831f397f49232f5c22df67903b9b8b8161e\": container with ID starting with 8fd269a96475df9dccf2f7bd0ffae831f397f49232f5c22df67903b9b8b8161e not found: ID does not exist" containerID="8fd269a96475df9dccf2f7bd0ffae831f397f49232f5c22df67903b9b8b8161e" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.440176 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8fd269a96475df9dccf2f7bd0ffae831f397f49232f5c22df67903b9b8b8161e"} err="failed to get container status \"8fd269a96475df9dccf2f7bd0ffae831f397f49232f5c22df67903b9b8b8161e\": rpc error: code = NotFound desc = could not find container \"8fd269a96475df9dccf2f7bd0ffae831f397f49232f5c22df67903b9b8b8161e\": container with ID starting with 8fd269a96475df9dccf2f7bd0ffae831f397f49232f5c22df67903b9b8b8161e not found: ID does not exist" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.440258 4921 scope.go:117] "RemoveContainer" containerID="27956424405bdf6223a96b8fd91b5152276a1501c3de2e07dfafc8b3329a6063" Dec 10 13:06:19 crc kubenswrapper[4921]: E1210 13:06:19.440553 4921 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"27956424405bdf6223a96b8fd91b5152276a1501c3de2e07dfafc8b3329a6063\": container with ID starting with 27956424405bdf6223a96b8fd91b5152276a1501c3de2e07dfafc8b3329a6063 not found: ID does not exist" containerID="27956424405bdf6223a96b8fd91b5152276a1501c3de2e07dfafc8b3329a6063" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.440596 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"27956424405bdf6223a96b8fd91b5152276a1501c3de2e07dfafc8b3329a6063"} err="failed to get container status \"27956424405bdf6223a96b8fd91b5152276a1501c3de2e07dfafc8b3329a6063\": rpc error: code = NotFound desc = could not find container \"27956424405bdf6223a96b8fd91b5152276a1501c3de2e07dfafc8b3329a6063\": container with ID starting with 27956424405bdf6223a96b8fd91b5152276a1501c3de2e07dfafc8b3329a6063 not found: ID does not exist" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.440613 4921 scope.go:117] "RemoveContainer" containerID="59a0f3962237d723e5aa9044de1ddce3673ae1fb4c9e5e0478cd41daa661f6d7" Dec 10 13:06:19 crc kubenswrapper[4921]: E1210 13:06:19.440863 4921 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"59a0f3962237d723e5aa9044de1ddce3673ae1fb4c9e5e0478cd41daa661f6d7\": container with ID starting with 59a0f3962237d723e5aa9044de1ddce3673ae1fb4c9e5e0478cd41daa661f6d7 not found: ID does not exist" containerID="59a0f3962237d723e5aa9044de1ddce3673ae1fb4c9e5e0478cd41daa661f6d7" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.440887 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"59a0f3962237d723e5aa9044de1ddce3673ae1fb4c9e5e0478cd41daa661f6d7"} err="failed to get container status \"59a0f3962237d723e5aa9044de1ddce3673ae1fb4c9e5e0478cd41daa661f6d7\": rpc error: code = NotFound desc = could not find container \"59a0f3962237d723e5aa9044de1ddce3673ae1fb4c9e5e0478cd41daa661f6d7\": container with ID starting with 59a0f3962237d723e5aa9044de1ddce3673ae1fb4c9e5e0478cd41daa661f6d7 not found: ID does not exist" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.440924 4921 scope.go:117] "RemoveContainer" containerID="34932b230bb26e6c4b1bdf433827ce608df8658f6fb76140a4f0ac680dc1d43f" Dec 10 13:06:19 crc kubenswrapper[4921]: E1210 13:06:19.441126 4921 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"34932b230bb26e6c4b1bdf433827ce608df8658f6fb76140a4f0ac680dc1d43f\": container with ID starting with 34932b230bb26e6c4b1bdf433827ce608df8658f6fb76140a4f0ac680dc1d43f not found: ID does not exist" containerID="34932b230bb26e6c4b1bdf433827ce608df8658f6fb76140a4f0ac680dc1d43f" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.441170 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"34932b230bb26e6c4b1bdf433827ce608df8658f6fb76140a4f0ac680dc1d43f"} err="failed to get container status \"34932b230bb26e6c4b1bdf433827ce608df8658f6fb76140a4f0ac680dc1d43f\": rpc error: code = NotFound desc = could not find container \"34932b230bb26e6c4b1bdf433827ce608df8658f6fb76140a4f0ac680dc1d43f\": container with ID starting with 34932b230bb26e6c4b1bdf433827ce608df8658f6fb76140a4f0ac680dc1d43f not found: ID does not exist" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.441187 4921 scope.go:117] "RemoveContainer" containerID="45f1cf4017292b2abcad5c57a340cd9c9b14a2bbfeeba7c10e3d0b3bcc0dca5a" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.441759 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"45f1cf4017292b2abcad5c57a340cd9c9b14a2bbfeeba7c10e3d0b3bcc0dca5a"} err="failed to get container status \"45f1cf4017292b2abcad5c57a340cd9c9b14a2bbfeeba7c10e3d0b3bcc0dca5a\": rpc error: code = NotFound desc = could not find container \"45f1cf4017292b2abcad5c57a340cd9c9b14a2bbfeeba7c10e3d0b3bcc0dca5a\": container with ID starting with 45f1cf4017292b2abcad5c57a340cd9c9b14a2bbfeeba7c10e3d0b3bcc0dca5a not found: ID does not exist" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.441799 4921 scope.go:117] "RemoveContainer" containerID="551dc5f4c39c06d2143805320061efc95d84e870eeecf23b3a64d829653810ed" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.442039 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"551dc5f4c39c06d2143805320061efc95d84e870eeecf23b3a64d829653810ed"} err="failed to get container status \"551dc5f4c39c06d2143805320061efc95d84e870eeecf23b3a64d829653810ed\": rpc error: code = NotFound desc = could not find container \"551dc5f4c39c06d2143805320061efc95d84e870eeecf23b3a64d829653810ed\": container with ID starting with 
551dc5f4c39c06d2143805320061efc95d84e870eeecf23b3a64d829653810ed not found: ID does not exist" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.442085 4921 scope.go:117] "RemoveContainer" containerID="dd0025f5be6e68aba73c349dd732281dead920b7d8c2d307b4a67cfdafb99119" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.442273 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dd0025f5be6e68aba73c349dd732281dead920b7d8c2d307b4a67cfdafb99119"} err="failed to get container status \"dd0025f5be6e68aba73c349dd732281dead920b7d8c2d307b4a67cfdafb99119\": rpc error: code = NotFound desc = could not find container \"dd0025f5be6e68aba73c349dd732281dead920b7d8c2d307b4a67cfdafb99119\": container with ID starting with dd0025f5be6e68aba73c349dd732281dead920b7d8c2d307b4a67cfdafb99119 not found: ID does not exist" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.442296 4921 scope.go:117] "RemoveContainer" containerID="933c0c81aa0aa2d676a6e404f883a7c81240ef7b07a2e794878c85994d0eb88f" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.442492 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"933c0c81aa0aa2d676a6e404f883a7c81240ef7b07a2e794878c85994d0eb88f"} err="failed to get container status \"933c0c81aa0aa2d676a6e404f883a7c81240ef7b07a2e794878c85994d0eb88f\": rpc error: code = NotFound desc = could not find container \"933c0c81aa0aa2d676a6e404f883a7c81240ef7b07a2e794878c85994d0eb88f\": container with ID starting with 933c0c81aa0aa2d676a6e404f883a7c81240ef7b07a2e794878c85994d0eb88f not found: ID does not exist" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.442515 4921 scope.go:117] "RemoveContainer" containerID="f8f888f214898cb28563da7a77267781622df1f2231c27d1fbdee617ada1ec2e" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.442684 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f8f888f214898cb28563da7a77267781622df1f2231c27d1fbdee617ada1ec2e"} err="failed to get container status \"f8f888f214898cb28563da7a77267781622df1f2231c27d1fbdee617ada1ec2e\": rpc error: code = NotFound desc = could not find container \"f8f888f214898cb28563da7a77267781622df1f2231c27d1fbdee617ada1ec2e\": container with ID starting with f8f888f214898cb28563da7a77267781622df1f2231c27d1fbdee617ada1ec2e not found: ID does not exist" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.442707 4921 scope.go:117] "RemoveContainer" containerID="1a88b1b9101bc4ab339d394df337e4e11ec8af98b44b621bcb84eed1a0fba3d7" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.442858 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1a88b1b9101bc4ab339d394df337e4e11ec8af98b44b621bcb84eed1a0fba3d7"} err="failed to get container status \"1a88b1b9101bc4ab339d394df337e4e11ec8af98b44b621bcb84eed1a0fba3d7\": rpc error: code = NotFound desc = could not find container \"1a88b1b9101bc4ab339d394df337e4e11ec8af98b44b621bcb84eed1a0fba3d7\": container with ID starting with 1a88b1b9101bc4ab339d394df337e4e11ec8af98b44b621bcb84eed1a0fba3d7 not found: ID does not exist" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.442877 4921 scope.go:117] "RemoveContainer" containerID="8fd269a96475df9dccf2f7bd0ffae831f397f49232f5c22df67903b9b8b8161e" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.443093 4921 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"8fd269a96475df9dccf2f7bd0ffae831f397f49232f5c22df67903b9b8b8161e"} err="failed to get container status \"8fd269a96475df9dccf2f7bd0ffae831f397f49232f5c22df67903b9b8b8161e\": rpc error: code = NotFound desc = could not find container \"8fd269a96475df9dccf2f7bd0ffae831f397f49232f5c22df67903b9b8b8161e\": container with ID starting with 8fd269a96475df9dccf2f7bd0ffae831f397f49232f5c22df67903b9b8b8161e not found: ID does not exist" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.443115 4921 scope.go:117] "RemoveContainer" containerID="27956424405bdf6223a96b8fd91b5152276a1501c3de2e07dfafc8b3329a6063" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.443320 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"27956424405bdf6223a96b8fd91b5152276a1501c3de2e07dfafc8b3329a6063"} err="failed to get container status \"27956424405bdf6223a96b8fd91b5152276a1501c3de2e07dfafc8b3329a6063\": rpc error: code = NotFound desc = could not find container \"27956424405bdf6223a96b8fd91b5152276a1501c3de2e07dfafc8b3329a6063\": container with ID starting with 27956424405bdf6223a96b8fd91b5152276a1501c3de2e07dfafc8b3329a6063 not found: ID does not exist" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.443359 4921 scope.go:117] "RemoveContainer" containerID="59a0f3962237d723e5aa9044de1ddce3673ae1fb4c9e5e0478cd41daa661f6d7" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.443572 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"59a0f3962237d723e5aa9044de1ddce3673ae1fb4c9e5e0478cd41daa661f6d7"} err="failed to get container status \"59a0f3962237d723e5aa9044de1ddce3673ae1fb4c9e5e0478cd41daa661f6d7\": rpc error: code = NotFound desc = could not find container \"59a0f3962237d723e5aa9044de1ddce3673ae1fb4c9e5e0478cd41daa661f6d7\": container with ID starting with 59a0f3962237d723e5aa9044de1ddce3673ae1fb4c9e5e0478cd41daa661f6d7 not found: ID does not exist" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.443594 4921 scope.go:117] "RemoveContainer" containerID="34932b230bb26e6c4b1bdf433827ce608df8658f6fb76140a4f0ac680dc1d43f" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.443747 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"34932b230bb26e6c4b1bdf433827ce608df8658f6fb76140a4f0ac680dc1d43f"} err="failed to get container status \"34932b230bb26e6c4b1bdf433827ce608df8658f6fb76140a4f0ac680dc1d43f\": rpc error: code = NotFound desc = could not find container \"34932b230bb26e6c4b1bdf433827ce608df8658f6fb76140a4f0ac680dc1d43f\": container with ID starting with 34932b230bb26e6c4b1bdf433827ce608df8658f6fb76140a4f0ac680dc1d43f not found: ID does not exist" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.443766 4921 scope.go:117] "RemoveContainer" containerID="45f1cf4017292b2abcad5c57a340cd9c9b14a2bbfeeba7c10e3d0b3bcc0dca5a" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.443944 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"45f1cf4017292b2abcad5c57a340cd9c9b14a2bbfeeba7c10e3d0b3bcc0dca5a"} err="failed to get container status \"45f1cf4017292b2abcad5c57a340cd9c9b14a2bbfeeba7c10e3d0b3bcc0dca5a\": rpc error: code = NotFound desc = could not find container \"45f1cf4017292b2abcad5c57a340cd9c9b14a2bbfeeba7c10e3d0b3bcc0dca5a\": container with ID starting with 45f1cf4017292b2abcad5c57a340cd9c9b14a2bbfeeba7c10e3d0b3bcc0dca5a not found: ID does not exist" Dec 
10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.443965 4921 scope.go:117] "RemoveContainer" containerID="551dc5f4c39c06d2143805320061efc95d84e870eeecf23b3a64d829653810ed" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.444166 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"551dc5f4c39c06d2143805320061efc95d84e870eeecf23b3a64d829653810ed"} err="failed to get container status \"551dc5f4c39c06d2143805320061efc95d84e870eeecf23b3a64d829653810ed\": rpc error: code = NotFound desc = could not find container \"551dc5f4c39c06d2143805320061efc95d84e870eeecf23b3a64d829653810ed\": container with ID starting with 551dc5f4c39c06d2143805320061efc95d84e870eeecf23b3a64d829653810ed not found: ID does not exist" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.444188 4921 scope.go:117] "RemoveContainer" containerID="dd0025f5be6e68aba73c349dd732281dead920b7d8c2d307b4a67cfdafb99119" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.444426 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dd0025f5be6e68aba73c349dd732281dead920b7d8c2d307b4a67cfdafb99119"} err="failed to get container status \"dd0025f5be6e68aba73c349dd732281dead920b7d8c2d307b4a67cfdafb99119\": rpc error: code = NotFound desc = could not find container \"dd0025f5be6e68aba73c349dd732281dead920b7d8c2d307b4a67cfdafb99119\": container with ID starting with dd0025f5be6e68aba73c349dd732281dead920b7d8c2d307b4a67cfdafb99119 not found: ID does not exist" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.444447 4921 scope.go:117] "RemoveContainer" containerID="933c0c81aa0aa2d676a6e404f883a7c81240ef7b07a2e794878c85994d0eb88f" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.444726 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"933c0c81aa0aa2d676a6e404f883a7c81240ef7b07a2e794878c85994d0eb88f"} err="failed to get container status \"933c0c81aa0aa2d676a6e404f883a7c81240ef7b07a2e794878c85994d0eb88f\": rpc error: code = NotFound desc = could not find container \"933c0c81aa0aa2d676a6e404f883a7c81240ef7b07a2e794878c85994d0eb88f\": container with ID starting with 933c0c81aa0aa2d676a6e404f883a7c81240ef7b07a2e794878c85994d0eb88f not found: ID does not exist" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.444749 4921 scope.go:117] "RemoveContainer" containerID="f8f888f214898cb28563da7a77267781622df1f2231c27d1fbdee617ada1ec2e" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.444965 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f8f888f214898cb28563da7a77267781622df1f2231c27d1fbdee617ada1ec2e"} err="failed to get container status \"f8f888f214898cb28563da7a77267781622df1f2231c27d1fbdee617ada1ec2e\": rpc error: code = NotFound desc = could not find container \"f8f888f214898cb28563da7a77267781622df1f2231c27d1fbdee617ada1ec2e\": container with ID starting with f8f888f214898cb28563da7a77267781622df1f2231c27d1fbdee617ada1ec2e not found: ID does not exist" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.445009 4921 scope.go:117] "RemoveContainer" containerID="1a88b1b9101bc4ab339d394df337e4e11ec8af98b44b621bcb84eed1a0fba3d7" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.445175 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1a88b1b9101bc4ab339d394df337e4e11ec8af98b44b621bcb84eed1a0fba3d7"} err="failed to get container status 
\"1a88b1b9101bc4ab339d394df337e4e11ec8af98b44b621bcb84eed1a0fba3d7\": rpc error: code = NotFound desc = could not find container \"1a88b1b9101bc4ab339d394df337e4e11ec8af98b44b621bcb84eed1a0fba3d7\": container with ID starting with 1a88b1b9101bc4ab339d394df337e4e11ec8af98b44b621bcb84eed1a0fba3d7 not found: ID does not exist" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.445195 4921 scope.go:117] "RemoveContainer" containerID="8fd269a96475df9dccf2f7bd0ffae831f397f49232f5c22df67903b9b8b8161e" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.445378 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8fd269a96475df9dccf2f7bd0ffae831f397f49232f5c22df67903b9b8b8161e"} err="failed to get container status \"8fd269a96475df9dccf2f7bd0ffae831f397f49232f5c22df67903b9b8b8161e\": rpc error: code = NotFound desc = could not find container \"8fd269a96475df9dccf2f7bd0ffae831f397f49232f5c22df67903b9b8b8161e\": container with ID starting with 8fd269a96475df9dccf2f7bd0ffae831f397f49232f5c22df67903b9b8b8161e not found: ID does not exist" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.445415 4921 scope.go:117] "RemoveContainer" containerID="27956424405bdf6223a96b8fd91b5152276a1501c3de2e07dfafc8b3329a6063" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.445701 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"27956424405bdf6223a96b8fd91b5152276a1501c3de2e07dfafc8b3329a6063"} err="failed to get container status \"27956424405bdf6223a96b8fd91b5152276a1501c3de2e07dfafc8b3329a6063\": rpc error: code = NotFound desc = could not find container \"27956424405bdf6223a96b8fd91b5152276a1501c3de2e07dfafc8b3329a6063\": container with ID starting with 27956424405bdf6223a96b8fd91b5152276a1501c3de2e07dfafc8b3329a6063 not found: ID does not exist" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.445744 4921 scope.go:117] "RemoveContainer" containerID="59a0f3962237d723e5aa9044de1ddce3673ae1fb4c9e5e0478cd41daa661f6d7" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.445994 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"59a0f3962237d723e5aa9044de1ddce3673ae1fb4c9e5e0478cd41daa661f6d7"} err="failed to get container status \"59a0f3962237d723e5aa9044de1ddce3673ae1fb4c9e5e0478cd41daa661f6d7\": rpc error: code = NotFound desc = could not find container \"59a0f3962237d723e5aa9044de1ddce3673ae1fb4c9e5e0478cd41daa661f6d7\": container with ID starting with 59a0f3962237d723e5aa9044de1ddce3673ae1fb4c9e5e0478cd41daa661f6d7 not found: ID does not exist" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.446016 4921 scope.go:117] "RemoveContainer" containerID="34932b230bb26e6c4b1bdf433827ce608df8658f6fb76140a4f0ac680dc1d43f" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.446215 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"34932b230bb26e6c4b1bdf433827ce608df8658f6fb76140a4f0ac680dc1d43f"} err="failed to get container status \"34932b230bb26e6c4b1bdf433827ce608df8658f6fb76140a4f0ac680dc1d43f\": rpc error: code = NotFound desc = could not find container \"34932b230bb26e6c4b1bdf433827ce608df8658f6fb76140a4f0ac680dc1d43f\": container with ID starting with 34932b230bb26e6c4b1bdf433827ce608df8658f6fb76140a4f0ac680dc1d43f not found: ID does not exist" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.446236 4921 scope.go:117] "RemoveContainer" 
containerID="45f1cf4017292b2abcad5c57a340cd9c9b14a2bbfeeba7c10e3d0b3bcc0dca5a" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.446530 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"45f1cf4017292b2abcad5c57a340cd9c9b14a2bbfeeba7c10e3d0b3bcc0dca5a"} err="failed to get container status \"45f1cf4017292b2abcad5c57a340cd9c9b14a2bbfeeba7c10e3d0b3bcc0dca5a\": rpc error: code = NotFound desc = could not find container \"45f1cf4017292b2abcad5c57a340cd9c9b14a2bbfeeba7c10e3d0b3bcc0dca5a\": container with ID starting with 45f1cf4017292b2abcad5c57a340cd9c9b14a2bbfeeba7c10e3d0b3bcc0dca5a not found: ID does not exist" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.446571 4921 scope.go:117] "RemoveContainer" containerID="551dc5f4c39c06d2143805320061efc95d84e870eeecf23b3a64d829653810ed" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.446792 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"551dc5f4c39c06d2143805320061efc95d84e870eeecf23b3a64d829653810ed"} err="failed to get container status \"551dc5f4c39c06d2143805320061efc95d84e870eeecf23b3a64d829653810ed\": rpc error: code = NotFound desc = could not find container \"551dc5f4c39c06d2143805320061efc95d84e870eeecf23b3a64d829653810ed\": container with ID starting with 551dc5f4c39c06d2143805320061efc95d84e870eeecf23b3a64d829653810ed not found: ID does not exist" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.446813 4921 scope.go:117] "RemoveContainer" containerID="dd0025f5be6e68aba73c349dd732281dead920b7d8c2d307b4a67cfdafb99119" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.447077 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dd0025f5be6e68aba73c349dd732281dead920b7d8c2d307b4a67cfdafb99119"} err="failed to get container status \"dd0025f5be6e68aba73c349dd732281dead920b7d8c2d307b4a67cfdafb99119\": rpc error: code = NotFound desc = could not find container \"dd0025f5be6e68aba73c349dd732281dead920b7d8c2d307b4a67cfdafb99119\": container with ID starting with dd0025f5be6e68aba73c349dd732281dead920b7d8c2d307b4a67cfdafb99119 not found: ID does not exist" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.447119 4921 scope.go:117] "RemoveContainer" containerID="933c0c81aa0aa2d676a6e404f883a7c81240ef7b07a2e794878c85994d0eb88f" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.447314 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"933c0c81aa0aa2d676a6e404f883a7c81240ef7b07a2e794878c85994d0eb88f"} err="failed to get container status \"933c0c81aa0aa2d676a6e404f883a7c81240ef7b07a2e794878c85994d0eb88f\": rpc error: code = NotFound desc = could not find container \"933c0c81aa0aa2d676a6e404f883a7c81240ef7b07a2e794878c85994d0eb88f\": container with ID starting with 933c0c81aa0aa2d676a6e404f883a7c81240ef7b07a2e794878c85994d0eb88f not found: ID does not exist" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.447420 4921 scope.go:117] "RemoveContainer" containerID="f8f888f214898cb28563da7a77267781622df1f2231c27d1fbdee617ada1ec2e" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.447637 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f8f888f214898cb28563da7a77267781622df1f2231c27d1fbdee617ada1ec2e"} err="failed to get container status \"f8f888f214898cb28563da7a77267781622df1f2231c27d1fbdee617ada1ec2e\": rpc error: code = NotFound desc = could not find 
container \"f8f888f214898cb28563da7a77267781622df1f2231c27d1fbdee617ada1ec2e\": container with ID starting with f8f888f214898cb28563da7a77267781622df1f2231c27d1fbdee617ada1ec2e not found: ID does not exist" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.447680 4921 scope.go:117] "RemoveContainer" containerID="1a88b1b9101bc4ab339d394df337e4e11ec8af98b44b621bcb84eed1a0fba3d7" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.447861 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1a88b1b9101bc4ab339d394df337e4e11ec8af98b44b621bcb84eed1a0fba3d7"} err="failed to get container status \"1a88b1b9101bc4ab339d394df337e4e11ec8af98b44b621bcb84eed1a0fba3d7\": rpc error: code = NotFound desc = could not find container \"1a88b1b9101bc4ab339d394df337e4e11ec8af98b44b621bcb84eed1a0fba3d7\": container with ID starting with 1a88b1b9101bc4ab339d394df337e4e11ec8af98b44b621bcb84eed1a0fba3d7 not found: ID does not exist" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.447883 4921 scope.go:117] "RemoveContainer" containerID="8fd269a96475df9dccf2f7bd0ffae831f397f49232f5c22df67903b9b8b8161e" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.448054 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8fd269a96475df9dccf2f7bd0ffae831f397f49232f5c22df67903b9b8b8161e"} err="failed to get container status \"8fd269a96475df9dccf2f7bd0ffae831f397f49232f5c22df67903b9b8b8161e\": rpc error: code = NotFound desc = could not find container \"8fd269a96475df9dccf2f7bd0ffae831f397f49232f5c22df67903b9b8b8161e\": container with ID starting with 8fd269a96475df9dccf2f7bd0ffae831f397f49232f5c22df67903b9b8b8161e not found: ID does not exist" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.448073 4921 scope.go:117] "RemoveContainer" containerID="27956424405bdf6223a96b8fd91b5152276a1501c3de2e07dfafc8b3329a6063" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.448285 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"27956424405bdf6223a96b8fd91b5152276a1501c3de2e07dfafc8b3329a6063"} err="failed to get container status \"27956424405bdf6223a96b8fd91b5152276a1501c3de2e07dfafc8b3329a6063\": rpc error: code = NotFound desc = could not find container \"27956424405bdf6223a96b8fd91b5152276a1501c3de2e07dfafc8b3329a6063\": container with ID starting with 27956424405bdf6223a96b8fd91b5152276a1501c3de2e07dfafc8b3329a6063 not found: ID does not exist" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.448325 4921 scope.go:117] "RemoveContainer" containerID="59a0f3962237d723e5aa9044de1ddce3673ae1fb4c9e5e0478cd41daa661f6d7" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.448543 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"59a0f3962237d723e5aa9044de1ddce3673ae1fb4c9e5e0478cd41daa661f6d7"} err="failed to get container status \"59a0f3962237d723e5aa9044de1ddce3673ae1fb4c9e5e0478cd41daa661f6d7\": rpc error: code = NotFound desc = could not find container \"59a0f3962237d723e5aa9044de1ddce3673ae1fb4c9e5e0478cd41daa661f6d7\": container with ID starting with 59a0f3962237d723e5aa9044de1ddce3673ae1fb4c9e5e0478cd41daa661f6d7 not found: ID does not exist" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.448562 4921 scope.go:117] "RemoveContainer" containerID="34932b230bb26e6c4b1bdf433827ce608df8658f6fb76140a4f0ac680dc1d43f" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.448724 4921 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"34932b230bb26e6c4b1bdf433827ce608df8658f6fb76140a4f0ac680dc1d43f"} err="failed to get container status \"34932b230bb26e6c4b1bdf433827ce608df8658f6fb76140a4f0ac680dc1d43f\": rpc error: code = NotFound desc = could not find container \"34932b230bb26e6c4b1bdf433827ce608df8658f6fb76140a4f0ac680dc1d43f\": container with ID starting with 34932b230bb26e6c4b1bdf433827ce608df8658f6fb76140a4f0ac680dc1d43f not found: ID does not exist" Dec 10 13:06:19 crc kubenswrapper[4921]: I1210 13:06:19.546631 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-5scwb" Dec 10 13:06:19 crc kubenswrapper[4921]: W1210 13:06:19.560190 4921 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2502fe99_0289_472a_b984_93d8e037383d.slice/crio-eac91bcd4099d9f124984c1ab917d51a0ee43bdbf8bf589febf1e8bf89bcdcfc WatchSource:0}: Error finding container eac91bcd4099d9f124984c1ab917d51a0ee43bdbf8bf589febf1e8bf89bcdcfc: Status 404 returned error can't find the container with id eac91bcd4099d9f124984c1ab917d51a0ee43bdbf8bf589febf1e8bf89bcdcfc Dec 10 13:06:20 crc kubenswrapper[4921]: I1210 13:06:20.270569 4921 generic.go:334] "Generic (PLEG): container finished" podID="2502fe99-0289-472a-b984-93d8e037383d" containerID="3e96a32918adf870457c2aafd6da1427e32ee45c757e5c3d9ea978b4c387760f" exitCode=0 Dec 10 13:06:20 crc kubenswrapper[4921]: I1210 13:06:20.270662 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5scwb" event={"ID":"2502fe99-0289-472a-b984-93d8e037383d","Type":"ContainerDied","Data":"3e96a32918adf870457c2aafd6da1427e32ee45c757e5c3d9ea978b4c387760f"} Dec 10 13:06:20 crc kubenswrapper[4921]: I1210 13:06:20.271051 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5scwb" event={"ID":"2502fe99-0289-472a-b984-93d8e037383d","Type":"ContainerStarted","Data":"eac91bcd4099d9f124984c1ab917d51a0ee43bdbf8bf589febf1e8bf89bcdcfc"} Dec 10 13:06:20 crc kubenswrapper[4921]: I1210 13:06:20.274030 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-pqlx4_78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e/kube-multus/1.log" Dec 10 13:06:21 crc kubenswrapper[4921]: I1210 13:06:21.198940 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="50684108-04fc-405c-82be-d21d16cd650b" path="/var/lib/kubelet/pods/50684108-04fc-405c-82be-d21d16cd650b/volumes" Dec 10 13:06:21 crc kubenswrapper[4921]: I1210 13:06:21.284197 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5scwb" event={"ID":"2502fe99-0289-472a-b984-93d8e037383d","Type":"ContainerStarted","Data":"79097965f8754da805ca2473842a86ed0c1b8f26f5481da90e943870448b1d84"} Dec 10 13:06:21 crc kubenswrapper[4921]: I1210 13:06:21.284235 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5scwb" event={"ID":"2502fe99-0289-472a-b984-93d8e037383d","Type":"ContainerStarted","Data":"76d4fd2d171d46d720d31fa116ffe7b122f5b870569ca98652bd50d8252beb39"} Dec 10 13:06:21 crc kubenswrapper[4921]: I1210 13:06:21.284244 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5scwb" 
event={"ID":"2502fe99-0289-472a-b984-93d8e037383d","Type":"ContainerStarted","Data":"f1c049368e473fb6b42c1e5acca56a780806b52e3bae68ec17ce4b3d1a1fcd69"} Dec 10 13:06:21 crc kubenswrapper[4921]: I1210 13:06:21.284254 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5scwb" event={"ID":"2502fe99-0289-472a-b984-93d8e037383d","Type":"ContainerStarted","Data":"bbdb326a5dfa1cf76c923110e81089a37f92b84bab8592bf51e774b49812b6f4"} Dec 10 13:06:21 crc kubenswrapper[4921]: I1210 13:06:21.284262 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5scwb" event={"ID":"2502fe99-0289-472a-b984-93d8e037383d","Type":"ContainerStarted","Data":"bb6f1c4e98f0f932b264b7c857ec9d68af59b8ce7de3f8bd35166dd7f5fed60c"} Dec 10 13:06:21 crc kubenswrapper[4921]: I1210 13:06:21.284269 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5scwb" event={"ID":"2502fe99-0289-472a-b984-93d8e037383d","Type":"ContainerStarted","Data":"f8beb133f8c419baed4c2a5fdb8304059791ca13ce00b2dea5e726f87e102171"} Dec 10 13:06:23 crc kubenswrapper[4921]: I1210 13:06:23.295981 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5scwb" event={"ID":"2502fe99-0289-472a-b984-93d8e037383d","Type":"ContainerStarted","Data":"af1cf0e9c74ff8f95b1b0dea89adc72e85b389d1894f323621fe2c5e71535384"} Dec 10 13:06:26 crc kubenswrapper[4921]: I1210 13:06:26.331987 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5scwb" event={"ID":"2502fe99-0289-472a-b984-93d8e037383d","Type":"ContainerStarted","Data":"448fd17a82fff23b46eab9d2e7d6fc75afdbd9d515fdfd20e997377791240f6d"} Dec 10 13:06:26 crc kubenswrapper[4921]: I1210 13:06:26.333192 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-5scwb" Dec 10 13:06:26 crc kubenswrapper[4921]: I1210 13:06:26.333304 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-5scwb" Dec 10 13:06:26 crc kubenswrapper[4921]: I1210 13:06:26.366503 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-5scwb" Dec 10 13:06:26 crc kubenswrapper[4921]: I1210 13:06:26.376196 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-5scwb" podStartSLOduration=8.376178599 podStartE2EDuration="8.376178599s" podCreationTimestamp="2025-12-10 13:06:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 13:06:26.374995527 +0000 UTC m=+583.591217461" watchObservedRunningTime="2025-12-10 13:06:26.376178599 +0000 UTC m=+583.592400523" Dec 10 13:06:27 crc kubenswrapper[4921]: I1210 13:06:27.337207 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-5scwb" Dec 10 13:06:27 crc kubenswrapper[4921]: I1210 13:06:27.359507 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-5scwb" Dec 10 13:06:34 crc kubenswrapper[4921]: I1210 13:06:34.194142 4921 scope.go:117] "RemoveContainer" containerID="78a25a5392eaeaa8d5c01232765dce78525cb15225f6d54f198f5ec652979da4" Dec 10 13:06:34 crc kubenswrapper[4921]: I1210 13:06:34.377589 4921 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-multus_multus-pqlx4_78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e/kube-multus/1.log" Dec 10 13:06:34 crc kubenswrapper[4921]: I1210 13:06:34.377918 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-pqlx4" event={"ID":"78fe4323-9d8e-40ea-9c79-4bbfe47c8d8e","Type":"ContainerStarted","Data":"bcb1f8906ae83b8a9fa111dfaec4e1733802793a7ae39a1207ddfa5a9e9ca39d"} Dec 10 13:06:49 crc kubenswrapper[4921]: I1210 13:06:49.569302 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-5scwb" Dec 10 13:06:59 crc kubenswrapper[4921]: I1210 13:06:59.059741 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fksrxf"] Dec 10 13:06:59 crc kubenswrapper[4921]: I1210 13:06:59.061292 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fksrxf" Dec 10 13:06:59 crc kubenswrapper[4921]: I1210 13:06:59.064172 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 10 13:06:59 crc kubenswrapper[4921]: I1210 13:06:59.072558 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fksrxf"] Dec 10 13:06:59 crc kubenswrapper[4921]: I1210 13:06:59.202232 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e0c8a533-3e1a-4c58-b97a-d9054e2d6476-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fksrxf\" (UID: \"e0c8a533-3e1a-4c58-b97a-d9054e2d6476\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fksrxf" Dec 10 13:06:59 crc kubenswrapper[4921]: I1210 13:06:59.202277 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e0c8a533-3e1a-4c58-b97a-d9054e2d6476-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fksrxf\" (UID: \"e0c8a533-3e1a-4c58-b97a-d9054e2d6476\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fksrxf" Dec 10 13:06:59 crc kubenswrapper[4921]: I1210 13:06:59.202325 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xt5md\" (UniqueName: \"kubernetes.io/projected/e0c8a533-3e1a-4c58-b97a-d9054e2d6476-kube-api-access-xt5md\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fksrxf\" (UID: \"e0c8a533-3e1a-4c58-b97a-d9054e2d6476\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fksrxf" Dec 10 13:06:59 crc kubenswrapper[4921]: I1210 13:06:59.303125 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e0c8a533-3e1a-4c58-b97a-d9054e2d6476-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fksrxf\" (UID: \"e0c8a533-3e1a-4c58-b97a-d9054e2d6476\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fksrxf" Dec 10 13:06:59 crc kubenswrapper[4921]: I1210 13:06:59.303171 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/e0c8a533-3e1a-4c58-b97a-d9054e2d6476-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fksrxf\" (UID: \"e0c8a533-3e1a-4c58-b97a-d9054e2d6476\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fksrxf" Dec 10 13:06:59 crc kubenswrapper[4921]: I1210 13:06:59.303217 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xt5md\" (UniqueName: \"kubernetes.io/projected/e0c8a533-3e1a-4c58-b97a-d9054e2d6476-kube-api-access-xt5md\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fksrxf\" (UID: \"e0c8a533-3e1a-4c58-b97a-d9054e2d6476\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fksrxf" Dec 10 13:06:59 crc kubenswrapper[4921]: I1210 13:06:59.304089 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e0c8a533-3e1a-4c58-b97a-d9054e2d6476-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fksrxf\" (UID: \"e0c8a533-3e1a-4c58-b97a-d9054e2d6476\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fksrxf" Dec 10 13:06:59 crc kubenswrapper[4921]: I1210 13:06:59.304405 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e0c8a533-3e1a-4c58-b97a-d9054e2d6476-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fksrxf\" (UID: \"e0c8a533-3e1a-4c58-b97a-d9054e2d6476\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fksrxf" Dec 10 13:06:59 crc kubenswrapper[4921]: I1210 13:06:59.321223 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xt5md\" (UniqueName: \"kubernetes.io/projected/e0c8a533-3e1a-4c58-b97a-d9054e2d6476-kube-api-access-xt5md\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fksrxf\" (UID: \"e0c8a533-3e1a-4c58-b97a-d9054e2d6476\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fksrxf" Dec 10 13:06:59 crc kubenswrapper[4921]: I1210 13:06:59.376066 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fksrxf" Dec 10 13:06:59 crc kubenswrapper[4921]: I1210 13:06:59.539865 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fksrxf"] Dec 10 13:07:00 crc kubenswrapper[4921]: I1210 13:07:00.529198 4921 generic.go:334] "Generic (PLEG): container finished" podID="e0c8a533-3e1a-4c58-b97a-d9054e2d6476" containerID="63c6cdeeb0e1807e03a2847111caf34864dd40d477a96bdb7525af800dd072de" exitCode=0 Dec 10 13:07:00 crc kubenswrapper[4921]: I1210 13:07:00.529400 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fksrxf" event={"ID":"e0c8a533-3e1a-4c58-b97a-d9054e2d6476","Type":"ContainerDied","Data":"63c6cdeeb0e1807e03a2847111caf34864dd40d477a96bdb7525af800dd072de"} Dec 10 13:07:00 crc kubenswrapper[4921]: I1210 13:07:00.529788 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fksrxf" event={"ID":"e0c8a533-3e1a-4c58-b97a-d9054e2d6476","Type":"ContainerStarted","Data":"eff1dc06a54f55e7cd675df90d8bff85740be5b234c4ac5694ad33c486a8a37e"} Dec 10 13:07:03 crc kubenswrapper[4921]: I1210 13:07:03.544875 4921 generic.go:334] "Generic (PLEG): container finished" podID="e0c8a533-3e1a-4c58-b97a-d9054e2d6476" containerID="d36504a7f58bb747b6996ef6f8a7e7041f9da9e20ab97a30c742752847b26ee0" exitCode=0 Dec 10 13:07:03 crc kubenswrapper[4921]: I1210 13:07:03.544930 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fksrxf" event={"ID":"e0c8a533-3e1a-4c58-b97a-d9054e2d6476","Type":"ContainerDied","Data":"d36504a7f58bb747b6996ef6f8a7e7041f9da9e20ab97a30c742752847b26ee0"} Dec 10 13:07:04 crc kubenswrapper[4921]: I1210 13:07:04.556559 4921 generic.go:334] "Generic (PLEG): container finished" podID="e0c8a533-3e1a-4c58-b97a-d9054e2d6476" containerID="f3d7ca3ef3615de05a2d44e8cb487617abe5036b0fc45e38ad088c5e47bde03b" exitCode=0 Dec 10 13:07:04 crc kubenswrapper[4921]: I1210 13:07:04.556764 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fksrxf" event={"ID":"e0c8a533-3e1a-4c58-b97a-d9054e2d6476","Type":"ContainerDied","Data":"f3d7ca3ef3615de05a2d44e8cb487617abe5036b0fc45e38ad088c5e47bde03b"} Dec 10 13:07:05 crc kubenswrapper[4921]: I1210 13:07:05.784527 4921 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fksrxf" Dec 10 13:07:05 crc kubenswrapper[4921]: I1210 13:07:05.827108 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e0c8a533-3e1a-4c58-b97a-d9054e2d6476-util\") pod \"e0c8a533-3e1a-4c58-b97a-d9054e2d6476\" (UID: \"e0c8a533-3e1a-4c58-b97a-d9054e2d6476\") " Dec 10 13:07:05 crc kubenswrapper[4921]: I1210 13:07:05.827168 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e0c8a533-3e1a-4c58-b97a-d9054e2d6476-bundle\") pod \"e0c8a533-3e1a-4c58-b97a-d9054e2d6476\" (UID: \"e0c8a533-3e1a-4c58-b97a-d9054e2d6476\") " Dec 10 13:07:05 crc kubenswrapper[4921]: I1210 13:07:05.827262 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xt5md\" (UniqueName: \"kubernetes.io/projected/e0c8a533-3e1a-4c58-b97a-d9054e2d6476-kube-api-access-xt5md\") pod \"e0c8a533-3e1a-4c58-b97a-d9054e2d6476\" (UID: \"e0c8a533-3e1a-4c58-b97a-d9054e2d6476\") " Dec 10 13:07:05 crc kubenswrapper[4921]: I1210 13:07:05.828230 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e0c8a533-3e1a-4c58-b97a-d9054e2d6476-bundle" (OuterVolumeSpecName: "bundle") pod "e0c8a533-3e1a-4c58-b97a-d9054e2d6476" (UID: "e0c8a533-3e1a-4c58-b97a-d9054e2d6476"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 13:07:05 crc kubenswrapper[4921]: I1210 13:07:05.835600 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e0c8a533-3e1a-4c58-b97a-d9054e2d6476-kube-api-access-xt5md" (OuterVolumeSpecName: "kube-api-access-xt5md") pod "e0c8a533-3e1a-4c58-b97a-d9054e2d6476" (UID: "e0c8a533-3e1a-4c58-b97a-d9054e2d6476"). InnerVolumeSpecName "kube-api-access-xt5md". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 13:07:05 crc kubenswrapper[4921]: I1210 13:07:05.840222 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e0c8a533-3e1a-4c58-b97a-d9054e2d6476-util" (OuterVolumeSpecName: "util") pod "e0c8a533-3e1a-4c58-b97a-d9054e2d6476" (UID: "e0c8a533-3e1a-4c58-b97a-d9054e2d6476"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 13:07:05 crc kubenswrapper[4921]: I1210 13:07:05.928456 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xt5md\" (UniqueName: \"kubernetes.io/projected/e0c8a533-3e1a-4c58-b97a-d9054e2d6476-kube-api-access-xt5md\") on node \"crc\" DevicePath \"\"" Dec 10 13:07:05 crc kubenswrapper[4921]: I1210 13:07:05.928484 4921 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e0c8a533-3e1a-4c58-b97a-d9054e2d6476-util\") on node \"crc\" DevicePath \"\"" Dec 10 13:07:05 crc kubenswrapper[4921]: I1210 13:07:05.928494 4921 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e0c8a533-3e1a-4c58-b97a-d9054e2d6476-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 13:07:06 crc kubenswrapper[4921]: I1210 13:07:06.579759 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fksrxf" event={"ID":"e0c8a533-3e1a-4c58-b97a-d9054e2d6476","Type":"ContainerDied","Data":"eff1dc06a54f55e7cd675df90d8bff85740be5b234c4ac5694ad33c486a8a37e"} Dec 10 13:07:06 crc kubenswrapper[4921]: I1210 13:07:06.580287 4921 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="eff1dc06a54f55e7cd675df90d8bff85740be5b234c4ac5694ad33c486a8a37e" Dec 10 13:07:06 crc kubenswrapper[4921]: I1210 13:07:06.579829 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fksrxf" Dec 10 13:07:07 crc kubenswrapper[4921]: I1210 13:07:07.702740 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-slfqc"] Dec 10 13:07:07 crc kubenswrapper[4921]: E1210 13:07:07.702974 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0c8a533-3e1a-4c58-b97a-d9054e2d6476" containerName="extract" Dec 10 13:07:07 crc kubenswrapper[4921]: I1210 13:07:07.702987 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0c8a533-3e1a-4c58-b97a-d9054e2d6476" containerName="extract" Dec 10 13:07:07 crc kubenswrapper[4921]: E1210 13:07:07.703001 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0c8a533-3e1a-4c58-b97a-d9054e2d6476" containerName="pull" Dec 10 13:07:07 crc kubenswrapper[4921]: I1210 13:07:07.703007 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0c8a533-3e1a-4c58-b97a-d9054e2d6476" containerName="pull" Dec 10 13:07:07 crc kubenswrapper[4921]: E1210 13:07:07.703014 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0c8a533-3e1a-4c58-b97a-d9054e2d6476" containerName="util" Dec 10 13:07:07 crc kubenswrapper[4921]: I1210 13:07:07.703021 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0c8a533-3e1a-4c58-b97a-d9054e2d6476" containerName="util" Dec 10 13:07:07 crc kubenswrapper[4921]: I1210 13:07:07.703104 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="e0c8a533-3e1a-4c58-b97a-d9054e2d6476" containerName="extract" Dec 10 13:07:07 crc kubenswrapper[4921]: I1210 13:07:07.703466 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-slfqc" Dec 10 13:07:07 crc kubenswrapper[4921]: I1210 13:07:07.705735 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-clnn5" Dec 10 13:07:07 crc kubenswrapper[4921]: I1210 13:07:07.705791 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt" Dec 10 13:07:07 crc kubenswrapper[4921]: I1210 13:07:07.707016 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt" Dec 10 13:07:07 crc kubenswrapper[4921]: I1210 13:07:07.717906 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-slfqc"] Dec 10 13:07:07 crc kubenswrapper[4921]: I1210 13:07:07.772947 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6znl2\" (UniqueName: \"kubernetes.io/projected/c580327c-ba4c-4336-b0b2-81d2303b967d-kube-api-access-6znl2\") pod \"nmstate-operator-5b5b58f5c8-slfqc\" (UID: \"c580327c-ba4c-4336-b0b2-81d2303b967d\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-slfqc" Dec 10 13:07:07 crc kubenswrapper[4921]: I1210 13:07:07.874147 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6znl2\" (UniqueName: \"kubernetes.io/projected/c580327c-ba4c-4336-b0b2-81d2303b967d-kube-api-access-6znl2\") pod \"nmstate-operator-5b5b58f5c8-slfqc\" (UID: \"c580327c-ba4c-4336-b0b2-81d2303b967d\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-slfqc" Dec 10 13:07:07 crc kubenswrapper[4921]: I1210 13:07:07.890253 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6znl2\" (UniqueName: \"kubernetes.io/projected/c580327c-ba4c-4336-b0b2-81d2303b967d-kube-api-access-6znl2\") pod \"nmstate-operator-5b5b58f5c8-slfqc\" (UID: \"c580327c-ba4c-4336-b0b2-81d2303b967d\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-slfqc" Dec 10 13:07:08 crc kubenswrapper[4921]: I1210 13:07:08.019895 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-slfqc" Dec 10 13:07:08 crc kubenswrapper[4921]: I1210 13:07:08.207470 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-slfqc"] Dec 10 13:07:08 crc kubenswrapper[4921]: W1210 13:07:08.220095 4921 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc580327c_ba4c_4336_b0b2_81d2303b967d.slice/crio-6f38622de479a420825a9d7da521b45dd131d2bbb7f2b66299927bd9dc392d0c WatchSource:0}: Error finding container 6f38622de479a420825a9d7da521b45dd131d2bbb7f2b66299927bd9dc392d0c: Status 404 returned error can't find the container with id 6f38622de479a420825a9d7da521b45dd131d2bbb7f2b66299927bd9dc392d0c Dec 10 13:07:08 crc kubenswrapper[4921]: I1210 13:07:08.589615 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-slfqc" event={"ID":"c580327c-ba4c-4336-b0b2-81d2303b967d","Type":"ContainerStarted","Data":"6f38622de479a420825a9d7da521b45dd131d2bbb7f2b66299927bd9dc392d0c"} Dec 10 13:07:12 crc kubenswrapper[4921]: I1210 13:07:12.611057 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-slfqc" event={"ID":"c580327c-ba4c-4336-b0b2-81d2303b967d","Type":"ContainerStarted","Data":"ec624ee211ef3c7c0e68595a0c446417ec86e2747bb1a79ccc0b3d862e55d676"} Dec 10 13:07:12 crc kubenswrapper[4921]: I1210 13:07:12.634434 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-slfqc" podStartSLOduration=2.05981726 podStartE2EDuration="5.634417273s" podCreationTimestamp="2025-12-10 13:07:07 +0000 UTC" firstStartedPulling="2025-12-10 13:07:08.222084962 +0000 UTC m=+625.438306876" lastFinishedPulling="2025-12-10 13:07:11.796684965 +0000 UTC m=+629.012906889" observedRunningTime="2025-12-10 13:07:12.630311602 +0000 UTC m=+629.846533536" watchObservedRunningTime="2025-12-10 13:07:12.634417273 +0000 UTC m=+629.850639207" Dec 10 13:07:13 crc kubenswrapper[4921]: I1210 13:07:13.622231 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-2gzh4"] Dec 10 13:07:13 crc kubenswrapper[4921]: I1210 13:07:13.623250 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-2gzh4" Dec 10 13:07:13 crc kubenswrapper[4921]: I1210 13:07:13.625710 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-sx7mt"] Dec 10 13:07:13 crc kubenswrapper[4921]: I1210 13:07:13.626376 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-sx7mt" Dec 10 13:07:13 crc kubenswrapper[4921]: W1210 13:07:13.628348 4921 reflector.go:561] object-"openshift-nmstate"/"nmstate-handler-dockercfg-bxhjd": failed to list *v1.Secret: secrets "nmstate-handler-dockercfg-bxhjd" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-nmstate": no relationship found between node 'crc' and this object Dec 10 13:07:13 crc kubenswrapper[4921]: E1210 13:07:13.628408 4921 reflector.go:158] "Unhandled Error" err="object-\"openshift-nmstate\"/\"nmstate-handler-dockercfg-bxhjd\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"nmstate-handler-dockercfg-bxhjd\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-nmstate\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 10 13:07:13 crc kubenswrapper[4921]: I1210 13:07:13.631046 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook" Dec 10 13:07:13 crc kubenswrapper[4921]: I1210 13:07:13.637458 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rnwzh\" (UniqueName: \"kubernetes.io/projected/173dac82-56c2-4045-aa84-52d6531c6a0a-kube-api-access-rnwzh\") pod \"nmstate-metrics-7f946cbc9-2gzh4\" (UID: \"173dac82-56c2-4045-aa84-52d6531c6a0a\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-2gzh4" Dec 10 13:07:13 crc kubenswrapper[4921]: I1210 13:07:13.637503 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/18a6708a-0f88-4b00-95ca-18199f78a88e-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-sx7mt\" (UID: \"18a6708a-0f88-4b00-95ca-18199f78a88e\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-sx7mt" Dec 10 13:07:13 crc kubenswrapper[4921]: I1210 13:07:13.637566 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vwfrk\" (UniqueName: \"kubernetes.io/projected/18a6708a-0f88-4b00-95ca-18199f78a88e-kube-api-access-vwfrk\") pod \"nmstate-webhook-5f6d4c5ccb-sx7mt\" (UID: \"18a6708a-0f88-4b00-95ca-18199f78a88e\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-sx7mt" Dec 10 13:07:13 crc kubenswrapper[4921]: I1210 13:07:13.665362 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-sx7mt"] Dec 10 13:07:13 crc kubenswrapper[4921]: I1210 13:07:13.673041 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-2gzh4"] Dec 10 13:07:13 crc kubenswrapper[4921]: I1210 13:07:13.675682 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-zb8qp"] Dec 10 13:07:13 crc kubenswrapper[4921]: I1210 13:07:13.676647 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-handler-zb8qp" Dec 10 13:07:13 crc kubenswrapper[4921]: I1210 13:07:13.738936 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/18a6708a-0f88-4b00-95ca-18199f78a88e-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-sx7mt\" (UID: \"18a6708a-0f88-4b00-95ca-18199f78a88e\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-sx7mt" Dec 10 13:07:13 crc kubenswrapper[4921]: I1210 13:07:13.739044 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vwfrk\" (UniqueName: \"kubernetes.io/projected/18a6708a-0f88-4b00-95ca-18199f78a88e-kube-api-access-vwfrk\") pod \"nmstate-webhook-5f6d4c5ccb-sx7mt\" (UID: \"18a6708a-0f88-4b00-95ca-18199f78a88e\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-sx7mt" Dec 10 13:07:13 crc kubenswrapper[4921]: I1210 13:07:13.739102 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rnwzh\" (UniqueName: \"kubernetes.io/projected/173dac82-56c2-4045-aa84-52d6531c6a0a-kube-api-access-rnwzh\") pod \"nmstate-metrics-7f946cbc9-2gzh4\" (UID: \"173dac82-56c2-4045-aa84-52d6531c6a0a\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-2gzh4" Dec 10 13:07:13 crc kubenswrapper[4921]: E1210 13:07:13.740199 4921 secret.go:188] Couldn't get secret openshift-nmstate/openshift-nmstate-webhook: secret "openshift-nmstate-webhook" not found Dec 10 13:07:13 crc kubenswrapper[4921]: E1210 13:07:13.740557 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/18a6708a-0f88-4b00-95ca-18199f78a88e-tls-key-pair podName:18a6708a-0f88-4b00-95ca-18199f78a88e nodeName:}" failed. No retries permitted until 2025-12-10 13:07:14.24053521 +0000 UTC m=+631.456757134 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "tls-key-pair" (UniqueName: "kubernetes.io/secret/18a6708a-0f88-4b00-95ca-18199f78a88e-tls-key-pair") pod "nmstate-webhook-5f6d4c5ccb-sx7mt" (UID: "18a6708a-0f88-4b00-95ca-18199f78a88e") : secret "openshift-nmstate-webhook" not found Dec 10 13:07:13 crc kubenswrapper[4921]: I1210 13:07:13.758236 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vwfrk\" (UniqueName: \"kubernetes.io/projected/18a6708a-0f88-4b00-95ca-18199f78a88e-kube-api-access-vwfrk\") pod \"nmstate-webhook-5f6d4c5ccb-sx7mt\" (UID: \"18a6708a-0f88-4b00-95ca-18199f78a88e\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-sx7mt" Dec 10 13:07:13 crc kubenswrapper[4921]: I1210 13:07:13.759161 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rnwzh\" (UniqueName: \"kubernetes.io/projected/173dac82-56c2-4045-aa84-52d6531c6a0a-kube-api-access-rnwzh\") pod \"nmstate-metrics-7f946cbc9-2gzh4\" (UID: \"173dac82-56c2-4045-aa84-52d6531c6a0a\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-2gzh4" Dec 10 13:07:13 crc kubenswrapper[4921]: I1210 13:07:13.816910 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-qsr5j"] Dec 10 13:07:13 crc kubenswrapper[4921]: I1210 13:07:13.817703 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-qsr5j" Dec 10 13:07:13 crc kubenswrapper[4921]: I1210 13:07:13.821439 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-847nv" Dec 10 13:07:13 crc kubenswrapper[4921]: I1210 13:07:13.821637 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert" Dec 10 13:07:13 crc kubenswrapper[4921]: I1210 13:07:13.821780 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf" Dec 10 13:07:13 crc kubenswrapper[4921]: I1210 13:07:13.839414 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-qsr5j"] Dec 10 13:07:13 crc kubenswrapper[4921]: I1210 13:07:13.841623 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/5c585358-c7f8-430a-a95e-56dfbf0e07e9-ovs-socket\") pod \"nmstate-handler-zb8qp\" (UID: \"5c585358-c7f8-430a-a95e-56dfbf0e07e9\") " pod="openshift-nmstate/nmstate-handler-zb8qp" Dec 10 13:07:13 crc kubenswrapper[4921]: I1210 13:07:13.841702 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c72z2\" (UniqueName: \"kubernetes.io/projected/5c585358-c7f8-430a-a95e-56dfbf0e07e9-kube-api-access-c72z2\") pod \"nmstate-handler-zb8qp\" (UID: \"5c585358-c7f8-430a-a95e-56dfbf0e07e9\") " pod="openshift-nmstate/nmstate-handler-zb8qp" Dec 10 13:07:13 crc kubenswrapper[4921]: I1210 13:07:13.841743 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/5c585358-c7f8-430a-a95e-56dfbf0e07e9-nmstate-lock\") pod \"nmstate-handler-zb8qp\" (UID: \"5c585358-c7f8-430a-a95e-56dfbf0e07e9\") " pod="openshift-nmstate/nmstate-handler-zb8qp" Dec 10 13:07:13 crc kubenswrapper[4921]: I1210 13:07:13.841777 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/5c585358-c7f8-430a-a95e-56dfbf0e07e9-dbus-socket\") pod \"nmstate-handler-zb8qp\" (UID: \"5c585358-c7f8-430a-a95e-56dfbf0e07e9\") " pod="openshift-nmstate/nmstate-handler-zb8qp" Dec 10 13:07:13 crc kubenswrapper[4921]: I1210 13:07:13.943616 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c72z2\" (UniqueName: \"kubernetes.io/projected/5c585358-c7f8-430a-a95e-56dfbf0e07e9-kube-api-access-c72z2\") pod \"nmstate-handler-zb8qp\" (UID: \"5c585358-c7f8-430a-a95e-56dfbf0e07e9\") " pod="openshift-nmstate/nmstate-handler-zb8qp" Dec 10 13:07:13 crc kubenswrapper[4921]: I1210 13:07:13.943659 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/a01c2903-c6f2-4490-9211-07ffc7af0431-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-qsr5j\" (UID: \"a01c2903-c6f2-4490-9211-07ffc7af0431\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-qsr5j" Dec 10 13:07:13 crc kubenswrapper[4921]: I1210 13:07:13.943688 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/5c585358-c7f8-430a-a95e-56dfbf0e07e9-nmstate-lock\") pod \"nmstate-handler-zb8qp\" (UID: \"5c585358-c7f8-430a-a95e-56dfbf0e07e9\") " 
pod="openshift-nmstate/nmstate-handler-zb8qp" Dec 10 13:07:13 crc kubenswrapper[4921]: I1210 13:07:13.943711 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/5c585358-c7f8-430a-a95e-56dfbf0e07e9-dbus-socket\") pod \"nmstate-handler-zb8qp\" (UID: \"5c585358-c7f8-430a-a95e-56dfbf0e07e9\") " pod="openshift-nmstate/nmstate-handler-zb8qp" Dec 10 13:07:13 crc kubenswrapper[4921]: I1210 13:07:13.943734 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gshhd\" (UniqueName: \"kubernetes.io/projected/a01c2903-c6f2-4490-9211-07ffc7af0431-kube-api-access-gshhd\") pod \"nmstate-console-plugin-7fbb5f6569-qsr5j\" (UID: \"a01c2903-c6f2-4490-9211-07ffc7af0431\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-qsr5j" Dec 10 13:07:13 crc kubenswrapper[4921]: I1210 13:07:13.943775 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/a01c2903-c6f2-4490-9211-07ffc7af0431-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-qsr5j\" (UID: \"a01c2903-c6f2-4490-9211-07ffc7af0431\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-qsr5j" Dec 10 13:07:13 crc kubenswrapper[4921]: I1210 13:07:13.943793 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/5c585358-c7f8-430a-a95e-56dfbf0e07e9-ovs-socket\") pod \"nmstate-handler-zb8qp\" (UID: \"5c585358-c7f8-430a-a95e-56dfbf0e07e9\") " pod="openshift-nmstate/nmstate-handler-zb8qp" Dec 10 13:07:13 crc kubenswrapper[4921]: I1210 13:07:13.943883 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/5c585358-c7f8-430a-a95e-56dfbf0e07e9-ovs-socket\") pod \"nmstate-handler-zb8qp\" (UID: \"5c585358-c7f8-430a-a95e-56dfbf0e07e9\") " pod="openshift-nmstate/nmstate-handler-zb8qp" Dec 10 13:07:13 crc kubenswrapper[4921]: I1210 13:07:13.944203 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/5c585358-c7f8-430a-a95e-56dfbf0e07e9-nmstate-lock\") pod \"nmstate-handler-zb8qp\" (UID: \"5c585358-c7f8-430a-a95e-56dfbf0e07e9\") " pod="openshift-nmstate/nmstate-handler-zb8qp" Dec 10 13:07:13 crc kubenswrapper[4921]: I1210 13:07:13.944622 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/5c585358-c7f8-430a-a95e-56dfbf0e07e9-dbus-socket\") pod \"nmstate-handler-zb8qp\" (UID: \"5c585358-c7f8-430a-a95e-56dfbf0e07e9\") " pod="openshift-nmstate/nmstate-handler-zb8qp" Dec 10 13:07:13 crc kubenswrapper[4921]: I1210 13:07:13.962254 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c72z2\" (UniqueName: \"kubernetes.io/projected/5c585358-c7f8-430a-a95e-56dfbf0e07e9-kube-api-access-c72z2\") pod \"nmstate-handler-zb8qp\" (UID: \"5c585358-c7f8-430a-a95e-56dfbf0e07e9\") " pod="openshift-nmstate/nmstate-handler-zb8qp" Dec 10 13:07:13 crc kubenswrapper[4921]: I1210 13:07:13.990469 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-c5d567c8-9zhdp"] Dec 10 13:07:13 crc kubenswrapper[4921]: I1210 13:07:13.991127 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-c5d567c8-9zhdp" Dec 10 13:07:14 crc kubenswrapper[4921]: I1210 13:07:14.004524 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-c5d567c8-9zhdp"] Dec 10 13:07:14 crc kubenswrapper[4921]: I1210 13:07:14.044558 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/a01c2903-c6f2-4490-9211-07ffc7af0431-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-qsr5j\" (UID: \"a01c2903-c6f2-4490-9211-07ffc7af0431\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-qsr5j" Dec 10 13:07:14 crc kubenswrapper[4921]: I1210 13:07:14.044816 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gshhd\" (UniqueName: \"kubernetes.io/projected/a01c2903-c6f2-4490-9211-07ffc7af0431-kube-api-access-gshhd\") pod \"nmstate-console-plugin-7fbb5f6569-qsr5j\" (UID: \"a01c2903-c6f2-4490-9211-07ffc7af0431\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-qsr5j" Dec 10 13:07:14 crc kubenswrapper[4921]: I1210 13:07:14.044935 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/a01c2903-c6f2-4490-9211-07ffc7af0431-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-qsr5j\" (UID: \"a01c2903-c6f2-4490-9211-07ffc7af0431\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-qsr5j" Dec 10 13:07:14 crc kubenswrapper[4921]: E1210 13:07:14.045142 4921 secret.go:188] Couldn't get secret openshift-nmstate/plugin-serving-cert: secret "plugin-serving-cert" not found Dec 10 13:07:14 crc kubenswrapper[4921]: E1210 13:07:14.045260 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a01c2903-c6f2-4490-9211-07ffc7af0431-plugin-serving-cert podName:a01c2903-c6f2-4490-9211-07ffc7af0431 nodeName:}" failed. No retries permitted until 2025-12-10 13:07:14.545244181 +0000 UTC m=+631.761466105 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "plugin-serving-cert" (UniqueName: "kubernetes.io/secret/a01c2903-c6f2-4490-9211-07ffc7af0431-plugin-serving-cert") pod "nmstate-console-plugin-7fbb5f6569-qsr5j" (UID: "a01c2903-c6f2-4490-9211-07ffc7af0431") : secret "plugin-serving-cert" not found Dec 10 13:07:14 crc kubenswrapper[4921]: I1210 13:07:14.046490 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/a01c2903-c6f2-4490-9211-07ffc7af0431-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-qsr5j\" (UID: \"a01c2903-c6f2-4490-9211-07ffc7af0431\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-qsr5j" Dec 10 13:07:14 crc kubenswrapper[4921]: I1210 13:07:14.061875 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gshhd\" (UniqueName: \"kubernetes.io/projected/a01c2903-c6f2-4490-9211-07ffc7af0431-kube-api-access-gshhd\") pod \"nmstate-console-plugin-7fbb5f6569-qsr5j\" (UID: \"a01c2903-c6f2-4490-9211-07ffc7af0431\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-qsr5j" Dec 10 13:07:14 crc kubenswrapper[4921]: I1210 13:07:14.146433 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/2d30ab5c-3c9d-465f-9643-4458afbdf989-service-ca\") pod \"console-c5d567c8-9zhdp\" (UID: \"2d30ab5c-3c9d-465f-9643-4458afbdf989\") " pod="openshift-console/console-c5d567c8-9zhdp" Dec 10 13:07:14 crc kubenswrapper[4921]: I1210 13:07:14.146745 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/2d30ab5c-3c9d-465f-9643-4458afbdf989-console-oauth-config\") pod \"console-c5d567c8-9zhdp\" (UID: \"2d30ab5c-3c9d-465f-9643-4458afbdf989\") " pod="openshift-console/console-c5d567c8-9zhdp" Dec 10 13:07:14 crc kubenswrapper[4921]: I1210 13:07:14.146853 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2d30ab5c-3c9d-465f-9643-4458afbdf989-trusted-ca-bundle\") pod \"console-c5d567c8-9zhdp\" (UID: \"2d30ab5c-3c9d-465f-9643-4458afbdf989\") " pod="openshift-console/console-c5d567c8-9zhdp" Dec 10 13:07:14 crc kubenswrapper[4921]: I1210 13:07:14.146942 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/2d30ab5c-3c9d-465f-9643-4458afbdf989-oauth-serving-cert\") pod \"console-c5d567c8-9zhdp\" (UID: \"2d30ab5c-3c9d-465f-9643-4458afbdf989\") " pod="openshift-console/console-c5d567c8-9zhdp" Dec 10 13:07:14 crc kubenswrapper[4921]: I1210 13:07:14.147018 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fr86s\" (UniqueName: \"kubernetes.io/projected/2d30ab5c-3c9d-465f-9643-4458afbdf989-kube-api-access-fr86s\") pod \"console-c5d567c8-9zhdp\" (UID: \"2d30ab5c-3c9d-465f-9643-4458afbdf989\") " pod="openshift-console/console-c5d567c8-9zhdp" Dec 10 13:07:14 crc kubenswrapper[4921]: I1210 13:07:14.147092 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/2d30ab5c-3c9d-465f-9643-4458afbdf989-console-config\") pod \"console-c5d567c8-9zhdp\" (UID: \"2d30ab5c-3c9d-465f-9643-4458afbdf989\") " 
pod="openshift-console/console-c5d567c8-9zhdp" Dec 10 13:07:14 crc kubenswrapper[4921]: I1210 13:07:14.147173 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/2d30ab5c-3c9d-465f-9643-4458afbdf989-console-serving-cert\") pod \"console-c5d567c8-9zhdp\" (UID: \"2d30ab5c-3c9d-465f-9643-4458afbdf989\") " pod="openshift-console/console-c5d567c8-9zhdp" Dec 10 13:07:14 crc kubenswrapper[4921]: I1210 13:07:14.248581 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/2d30ab5c-3c9d-465f-9643-4458afbdf989-oauth-serving-cert\") pod \"console-c5d567c8-9zhdp\" (UID: \"2d30ab5c-3c9d-465f-9643-4458afbdf989\") " pod="openshift-console/console-c5d567c8-9zhdp" Dec 10 13:07:14 crc kubenswrapper[4921]: I1210 13:07:14.248892 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fr86s\" (UniqueName: \"kubernetes.io/projected/2d30ab5c-3c9d-465f-9643-4458afbdf989-kube-api-access-fr86s\") pod \"console-c5d567c8-9zhdp\" (UID: \"2d30ab5c-3c9d-465f-9643-4458afbdf989\") " pod="openshift-console/console-c5d567c8-9zhdp" Dec 10 13:07:14 crc kubenswrapper[4921]: I1210 13:07:14.248981 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/2d30ab5c-3c9d-465f-9643-4458afbdf989-console-config\") pod \"console-c5d567c8-9zhdp\" (UID: \"2d30ab5c-3c9d-465f-9643-4458afbdf989\") " pod="openshift-console/console-c5d567c8-9zhdp" Dec 10 13:07:14 crc kubenswrapper[4921]: I1210 13:07:14.249110 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/2d30ab5c-3c9d-465f-9643-4458afbdf989-console-serving-cert\") pod \"console-c5d567c8-9zhdp\" (UID: \"2d30ab5c-3c9d-465f-9643-4458afbdf989\") " pod="openshift-console/console-c5d567c8-9zhdp" Dec 10 13:07:14 crc kubenswrapper[4921]: I1210 13:07:14.249250 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/2d30ab5c-3c9d-465f-9643-4458afbdf989-service-ca\") pod \"console-c5d567c8-9zhdp\" (UID: \"2d30ab5c-3c9d-465f-9643-4458afbdf989\") " pod="openshift-console/console-c5d567c8-9zhdp" Dec 10 13:07:14 crc kubenswrapper[4921]: I1210 13:07:14.249401 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/18a6708a-0f88-4b00-95ca-18199f78a88e-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-sx7mt\" (UID: \"18a6708a-0f88-4b00-95ca-18199f78a88e\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-sx7mt" Dec 10 13:07:14 crc kubenswrapper[4921]: I1210 13:07:14.249503 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/2d30ab5c-3c9d-465f-9643-4458afbdf989-console-oauth-config\") pod \"console-c5d567c8-9zhdp\" (UID: \"2d30ab5c-3c9d-465f-9643-4458afbdf989\") " pod="openshift-console/console-c5d567c8-9zhdp" Dec 10 13:07:14 crc kubenswrapper[4921]: I1210 13:07:14.249592 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2d30ab5c-3c9d-465f-9643-4458afbdf989-trusted-ca-bundle\") pod \"console-c5d567c8-9zhdp\" (UID: \"2d30ab5c-3c9d-465f-9643-4458afbdf989\") " 
pod="openshift-console/console-c5d567c8-9zhdp" Dec 10 13:07:14 crc kubenswrapper[4921]: I1210 13:07:14.250212 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/2d30ab5c-3c9d-465f-9643-4458afbdf989-service-ca\") pod \"console-c5d567c8-9zhdp\" (UID: \"2d30ab5c-3c9d-465f-9643-4458afbdf989\") " pod="openshift-console/console-c5d567c8-9zhdp" Dec 10 13:07:14 crc kubenswrapper[4921]: I1210 13:07:14.251008 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/2d30ab5c-3c9d-465f-9643-4458afbdf989-console-config\") pod \"console-c5d567c8-9zhdp\" (UID: \"2d30ab5c-3c9d-465f-9643-4458afbdf989\") " pod="openshift-console/console-c5d567c8-9zhdp" Dec 10 13:07:14 crc kubenswrapper[4921]: I1210 13:07:14.251266 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2d30ab5c-3c9d-465f-9643-4458afbdf989-trusted-ca-bundle\") pod \"console-c5d567c8-9zhdp\" (UID: \"2d30ab5c-3c9d-465f-9643-4458afbdf989\") " pod="openshift-console/console-c5d567c8-9zhdp" Dec 10 13:07:14 crc kubenswrapper[4921]: I1210 13:07:14.251865 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/2d30ab5c-3c9d-465f-9643-4458afbdf989-console-serving-cert\") pod \"console-c5d567c8-9zhdp\" (UID: \"2d30ab5c-3c9d-465f-9643-4458afbdf989\") " pod="openshift-console/console-c5d567c8-9zhdp" Dec 10 13:07:14 crc kubenswrapper[4921]: I1210 13:07:14.251958 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/2d30ab5c-3c9d-465f-9643-4458afbdf989-oauth-serving-cert\") pod \"console-c5d567c8-9zhdp\" (UID: \"2d30ab5c-3c9d-465f-9643-4458afbdf989\") " pod="openshift-console/console-c5d567c8-9zhdp" Dec 10 13:07:14 crc kubenswrapper[4921]: I1210 13:07:14.253681 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/2d30ab5c-3c9d-465f-9643-4458afbdf989-console-oauth-config\") pod \"console-c5d567c8-9zhdp\" (UID: \"2d30ab5c-3c9d-465f-9643-4458afbdf989\") " pod="openshift-console/console-c5d567c8-9zhdp" Dec 10 13:07:14 crc kubenswrapper[4921]: I1210 13:07:14.254364 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/18a6708a-0f88-4b00-95ca-18199f78a88e-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-sx7mt\" (UID: \"18a6708a-0f88-4b00-95ca-18199f78a88e\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-sx7mt" Dec 10 13:07:14 crc kubenswrapper[4921]: I1210 13:07:14.276265 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fr86s\" (UniqueName: \"kubernetes.io/projected/2d30ab5c-3c9d-465f-9643-4458afbdf989-kube-api-access-fr86s\") pod \"console-c5d567c8-9zhdp\" (UID: \"2d30ab5c-3c9d-465f-9643-4458afbdf989\") " pod="openshift-console/console-c5d567c8-9zhdp" Dec 10 13:07:14 crc kubenswrapper[4921]: I1210 13:07:14.308553 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-c5d567c8-9zhdp" Dec 10 13:07:14 crc kubenswrapper[4921]: I1210 13:07:14.507528 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-c5d567c8-9zhdp"] Dec 10 13:07:14 crc kubenswrapper[4921]: I1210 13:07:14.554657 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/a01c2903-c6f2-4490-9211-07ffc7af0431-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-qsr5j\" (UID: \"a01c2903-c6f2-4490-9211-07ffc7af0431\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-qsr5j" Dec 10 13:07:14 crc kubenswrapper[4921]: I1210 13:07:14.559321 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/a01c2903-c6f2-4490-9211-07ffc7af0431-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-qsr5j\" (UID: \"a01c2903-c6f2-4490-9211-07ffc7af0431\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-qsr5j" Dec 10 13:07:14 crc kubenswrapper[4921]: I1210 13:07:14.621336 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-c5d567c8-9zhdp" event={"ID":"2d30ab5c-3c9d-465f-9643-4458afbdf989","Type":"ContainerStarted","Data":"27dfaebbf8597eb9a811a091db76f3ee63d49335c579c4ae0211b43f7192b0d7"} Dec 10 13:07:14 crc kubenswrapper[4921]: I1210 13:07:14.731608 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-qsr5j" Dec 10 13:07:14 crc kubenswrapper[4921]: I1210 13:07:14.926240 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-qsr5j"] Dec 10 13:07:14 crc kubenswrapper[4921]: W1210 13:07:14.936265 4921 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda01c2903_c6f2_4490_9211_07ffc7af0431.slice/crio-e17b87dfd0d7eeb1f6706d6c0dd8b6e805889637514f9f1829cf94add3cef071 WatchSource:0}: Error finding container e17b87dfd0d7eeb1f6706d6c0dd8b6e805889637514f9f1829cf94add3cef071: Status 404 returned error can't find the container with id e17b87dfd0d7eeb1f6706d6c0dd8b6e805889637514f9f1829cf94add3cef071 Dec 10 13:07:14 crc kubenswrapper[4921]: I1210 13:07:14.939110 4921 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." pod="openshift-nmstate/nmstate-metrics-7f946cbc9-2gzh4" secret="" err="failed to sync secret cache: timed out waiting for the condition" Dec 10 13:07:14 crc kubenswrapper[4921]: I1210 13:07:14.939145 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-2gzh4" Dec 10 13:07:14 crc kubenswrapper[4921]: I1210 13:07:14.992555 4921 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." pod="openshift-nmstate/nmstate-handler-zb8qp" secret="" err="failed to sync secret cache: timed out waiting for the condition" Dec 10 13:07:14 crc kubenswrapper[4921]: I1210 13:07:14.992647 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-handler-zb8qp" Dec 10 13:07:15 crc kubenswrapper[4921]: W1210 13:07:15.028348 4921 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5c585358_c7f8_430a_a95e_56dfbf0e07e9.slice/crio-5d0ac273e2ef711e0e55aed49b0b0150bfdcafbc213df609ef845472b0eb365e WatchSource:0}: Error finding container 5d0ac273e2ef711e0e55aed49b0b0150bfdcafbc213df609ef845472b0eb365e: Status 404 returned error can't find the container with id 5d0ac273e2ef711e0e55aed49b0b0150bfdcafbc213df609ef845472b0eb365e Dec 10 13:07:15 crc kubenswrapper[4921]: I1210 13:07:15.187030 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-bxhjd" Dec 10 13:07:15 crc kubenswrapper[4921]: I1210 13:07:15.190795 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-sx7mt" Dec 10 13:07:15 crc kubenswrapper[4921]: I1210 13:07:15.333014 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-2gzh4"] Dec 10 13:07:15 crc kubenswrapper[4921]: I1210 13:07:15.370138 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-sx7mt"] Dec 10 13:07:15 crc kubenswrapper[4921]: W1210 13:07:15.376283 4921 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod18a6708a_0f88_4b00_95ca_18199f78a88e.slice/crio-c299231549ad0edb04d77206bfc9439d558d9d9c4f9727d42f141b6e59d6531a WatchSource:0}: Error finding container c299231549ad0edb04d77206bfc9439d558d9d9c4f9727d42f141b6e59d6531a: Status 404 returned error can't find the container with id c299231549ad0edb04d77206bfc9439d558d9d9c4f9727d42f141b6e59d6531a Dec 10 13:07:15 crc kubenswrapper[4921]: I1210 13:07:15.630046 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-zb8qp" event={"ID":"5c585358-c7f8-430a-a95e-56dfbf0e07e9","Type":"ContainerStarted","Data":"5d0ac273e2ef711e0e55aed49b0b0150bfdcafbc213df609ef845472b0eb365e"} Dec 10 13:07:15 crc kubenswrapper[4921]: I1210 13:07:15.631689 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-c5d567c8-9zhdp" event={"ID":"2d30ab5c-3c9d-465f-9643-4458afbdf989","Type":"ContainerStarted","Data":"fefda95edb6c43e4753e098247547e58671594f00f10967c777e35dd006f712b"} Dec 10 13:07:15 crc kubenswrapper[4921]: I1210 13:07:15.633727 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-sx7mt" event={"ID":"18a6708a-0f88-4b00-95ca-18199f78a88e","Type":"ContainerStarted","Data":"c299231549ad0edb04d77206bfc9439d558d9d9c4f9727d42f141b6e59d6531a"} Dec 10 13:07:15 crc kubenswrapper[4921]: I1210 13:07:15.635193 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-2gzh4" event={"ID":"173dac82-56c2-4045-aa84-52d6531c6a0a","Type":"ContainerStarted","Data":"40a5963c47af102c3e7cf743cb18b3edd33560cf300ced16a796648d33d0eb1f"} Dec 10 13:07:15 crc kubenswrapper[4921]: I1210 13:07:15.636618 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-qsr5j" event={"ID":"a01c2903-c6f2-4490-9211-07ffc7af0431","Type":"ContainerStarted","Data":"e17b87dfd0d7eeb1f6706d6c0dd8b6e805889637514f9f1829cf94add3cef071"} Dec 10 13:07:18 crc kubenswrapper[4921]: I1210 
13:07:18.656427 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-2gzh4" event={"ID":"173dac82-56c2-4045-aa84-52d6531c6a0a","Type":"ContainerStarted","Data":"3d475101e3a4c8b4558ef55421b28cc7aa4b04d567f983521fe3c66b10d1bd85"} Dec 10 13:07:18 crc kubenswrapper[4921]: I1210 13:07:18.660028 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-qsr5j" event={"ID":"a01c2903-c6f2-4490-9211-07ffc7af0431","Type":"ContainerStarted","Data":"b768ac07338368554a54456ae2c9706efe4105e3842c7c8b0c773aad66646885"} Dec 10 13:07:18 crc kubenswrapper[4921]: I1210 13:07:18.662098 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-zb8qp" event={"ID":"5c585358-c7f8-430a-a95e-56dfbf0e07e9","Type":"ContainerStarted","Data":"08f952d6a33b65736f5174e7becdf77aec9d138030b4ead0891f9d00443da0fe"} Dec 10 13:07:18 crc kubenswrapper[4921]: I1210 13:07:18.663982 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-sx7mt" event={"ID":"18a6708a-0f88-4b00-95ca-18199f78a88e","Type":"ContainerStarted","Data":"b9b6b776e46741d4cc99a77275c2a9543b2564e592757e26a46b1e2a3335dfa0"} Dec 10 13:07:18 crc kubenswrapper[4921]: I1210 13:07:18.664080 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-sx7mt" Dec 10 13:07:18 crc kubenswrapper[4921]: I1210 13:07:18.676952 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-qsr5j" podStartSLOduration=3.006471265 podStartE2EDuration="5.676935567s" podCreationTimestamp="2025-12-10 13:07:13 +0000 UTC" firstStartedPulling="2025-12-10 13:07:14.939085854 +0000 UTC m=+632.155307778" lastFinishedPulling="2025-12-10 13:07:17.609550156 +0000 UTC m=+634.825772080" observedRunningTime="2025-12-10 13:07:18.676237138 +0000 UTC m=+635.892459092" watchObservedRunningTime="2025-12-10 13:07:18.676935567 +0000 UTC m=+635.893157491" Dec 10 13:07:18 crc kubenswrapper[4921]: I1210 13:07:18.680141 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-c5d567c8-9zhdp" podStartSLOduration=5.680130774 podStartE2EDuration="5.680130774s" podCreationTimestamp="2025-12-10 13:07:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 13:07:15.654660973 +0000 UTC m=+632.870882917" watchObservedRunningTime="2025-12-10 13:07:18.680130774 +0000 UTC m=+635.896352698" Dec 10 13:07:18 crc kubenswrapper[4921]: I1210 13:07:18.699101 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-sx7mt" podStartSLOduration=3.351772613 podStartE2EDuration="5.699084276s" podCreationTimestamp="2025-12-10 13:07:13 +0000 UTC" firstStartedPulling="2025-12-10 13:07:15.378312798 +0000 UTC m=+632.594534722" lastFinishedPulling="2025-12-10 13:07:17.725624461 +0000 UTC m=+634.941846385" observedRunningTime="2025-12-10 13:07:18.695774026 +0000 UTC m=+635.911995960" watchObservedRunningTime="2025-12-10 13:07:18.699084276 +0000 UTC m=+635.915306190" Dec 10 13:07:18 crc kubenswrapper[4921]: I1210 13:07:18.721001 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-zb8qp" podStartSLOduration=3.026093406 podStartE2EDuration="5.720984807s" 
podCreationTimestamp="2025-12-10 13:07:13 +0000 UTC" firstStartedPulling="2025-12-10 13:07:15.030793132 +0000 UTC m=+632.247015056" lastFinishedPulling="2025-12-10 13:07:17.725684533 +0000 UTC m=+634.941906457" observedRunningTime="2025-12-10 13:07:18.712888758 +0000 UTC m=+635.929110692" watchObservedRunningTime="2025-12-10 13:07:18.720984807 +0000 UTC m=+635.937206721" Dec 10 13:07:19 crc kubenswrapper[4921]: I1210 13:07:19.669436 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-zb8qp" Dec 10 13:07:20 crc kubenswrapper[4921]: I1210 13:07:20.675932 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-2gzh4" event={"ID":"173dac82-56c2-4045-aa84-52d6531c6a0a","Type":"ContainerStarted","Data":"deb216d966f35bbdffdd24fb622ead5f9e3616d76c069184460f7bb3a54aca8f"} Dec 10 13:07:20 crc kubenswrapper[4921]: I1210 13:07:20.700669 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-2gzh4" podStartSLOduration=2.892801115 podStartE2EDuration="7.700648559s" podCreationTimestamp="2025-12-10 13:07:13 +0000 UTC" firstStartedPulling="2025-12-10 13:07:15.350303832 +0000 UTC m=+632.566525756" lastFinishedPulling="2025-12-10 13:07:20.158151276 +0000 UTC m=+637.374373200" observedRunningTime="2025-12-10 13:07:20.699488458 +0000 UTC m=+637.915710392" watchObservedRunningTime="2025-12-10 13:07:20.700648559 +0000 UTC m=+637.916870483" Dec 10 13:07:24 crc kubenswrapper[4921]: I1210 13:07:24.308767 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-c5d567c8-9zhdp" Dec 10 13:07:24 crc kubenswrapper[4921]: I1210 13:07:24.309594 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-c5d567c8-9zhdp" Dec 10 13:07:24 crc kubenswrapper[4921]: I1210 13:07:24.315619 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-c5d567c8-9zhdp" Dec 10 13:07:24 crc kubenswrapper[4921]: I1210 13:07:24.706465 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-c5d567c8-9zhdp" Dec 10 13:07:24 crc kubenswrapper[4921]: I1210 13:07:24.766536 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-8pb7c"] Dec 10 13:07:25 crc kubenswrapper[4921]: I1210 13:07:25.012109 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-zb8qp" Dec 10 13:07:35 crc kubenswrapper[4921]: I1210 13:07:35.203844 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-sx7mt" Dec 10 13:07:49 crc kubenswrapper[4921]: I1210 13:07:49.837327 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-8pb7c" podUID="ad2115a5-1371-4a19-b1e8-7f93a7719a71" containerName="console" containerID="cri-o://5385b19f4a74f6dbfbfc105a753913007b044a27376d986811760a2ff87984b7" gracePeriod=15 Dec 10 13:07:49 crc kubenswrapper[4921]: I1210 13:07:49.998551 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83psbdz"] Dec 10 13:07:49 crc kubenswrapper[4921]: I1210 13:07:49.999955 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83psbdz" Dec 10 13:07:50 crc kubenswrapper[4921]: I1210 13:07:50.001977 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 10 13:07:50 crc kubenswrapper[4921]: I1210 13:07:50.010256 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83psbdz"] Dec 10 13:07:50 crc kubenswrapper[4921]: I1210 13:07:50.162080 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/1d3566b0-e028-4148-a7be-04cf455a999c-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83psbdz\" (UID: \"1d3566b0-e028-4148-a7be-04cf455a999c\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83psbdz" Dec 10 13:07:50 crc kubenswrapper[4921]: I1210 13:07:50.162138 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/1d3566b0-e028-4148-a7be-04cf455a999c-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83psbdz\" (UID: \"1d3566b0-e028-4148-a7be-04cf455a999c\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83psbdz" Dec 10 13:07:50 crc kubenswrapper[4921]: I1210 13:07:50.162197 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tn4lh\" (UniqueName: \"kubernetes.io/projected/1d3566b0-e028-4148-a7be-04cf455a999c-kube-api-access-tn4lh\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83psbdz\" (UID: \"1d3566b0-e028-4148-a7be-04cf455a999c\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83psbdz" Dec 10 13:07:50 crc kubenswrapper[4921]: I1210 13:07:50.211339 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-8pb7c_ad2115a5-1371-4a19-b1e8-7f93a7719a71/console/0.log" Dec 10 13:07:50 crc kubenswrapper[4921]: I1210 13:07:50.211471 4921 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-8pb7c" Dec 10 13:07:50 crc kubenswrapper[4921]: I1210 13:07:50.263376 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tn4lh\" (UniqueName: \"kubernetes.io/projected/1d3566b0-e028-4148-a7be-04cf455a999c-kube-api-access-tn4lh\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83psbdz\" (UID: \"1d3566b0-e028-4148-a7be-04cf455a999c\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83psbdz" Dec 10 13:07:50 crc kubenswrapper[4921]: I1210 13:07:50.263488 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/1d3566b0-e028-4148-a7be-04cf455a999c-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83psbdz\" (UID: \"1d3566b0-e028-4148-a7be-04cf455a999c\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83psbdz" Dec 10 13:07:50 crc kubenswrapper[4921]: I1210 13:07:50.263509 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/1d3566b0-e028-4148-a7be-04cf455a999c-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83psbdz\" (UID: \"1d3566b0-e028-4148-a7be-04cf455a999c\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83psbdz" Dec 10 13:07:50 crc kubenswrapper[4921]: I1210 13:07:50.263920 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/1d3566b0-e028-4148-a7be-04cf455a999c-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83psbdz\" (UID: \"1d3566b0-e028-4148-a7be-04cf455a999c\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83psbdz" Dec 10 13:07:50 crc kubenswrapper[4921]: I1210 13:07:50.264133 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/1d3566b0-e028-4148-a7be-04cf455a999c-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83psbdz\" (UID: \"1d3566b0-e028-4148-a7be-04cf455a999c\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83psbdz" Dec 10 13:07:50 crc kubenswrapper[4921]: I1210 13:07:50.285667 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tn4lh\" (UniqueName: \"kubernetes.io/projected/1d3566b0-e028-4148-a7be-04cf455a999c-kube-api-access-tn4lh\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83psbdz\" (UID: \"1d3566b0-e028-4148-a7be-04cf455a999c\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83psbdz" Dec 10 13:07:50 crc kubenswrapper[4921]: I1210 13:07:50.313859 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83psbdz" Dec 10 13:07:50 crc kubenswrapper[4921]: I1210 13:07:50.364086 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/ad2115a5-1371-4a19-b1e8-7f93a7719a71-console-serving-cert\") pod \"ad2115a5-1371-4a19-b1e8-7f93a7719a71\" (UID: \"ad2115a5-1371-4a19-b1e8-7f93a7719a71\") " Dec 10 13:07:50 crc kubenswrapper[4921]: I1210 13:07:50.364168 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/ad2115a5-1371-4a19-b1e8-7f93a7719a71-console-config\") pod \"ad2115a5-1371-4a19-b1e8-7f93a7719a71\" (UID: \"ad2115a5-1371-4a19-b1e8-7f93a7719a71\") " Dec 10 13:07:50 crc kubenswrapper[4921]: I1210 13:07:50.364217 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/ad2115a5-1371-4a19-b1e8-7f93a7719a71-oauth-serving-cert\") pod \"ad2115a5-1371-4a19-b1e8-7f93a7719a71\" (UID: \"ad2115a5-1371-4a19-b1e8-7f93a7719a71\") " Dec 10 13:07:50 crc kubenswrapper[4921]: I1210 13:07:50.364253 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ks8fc\" (UniqueName: \"kubernetes.io/projected/ad2115a5-1371-4a19-b1e8-7f93a7719a71-kube-api-access-ks8fc\") pod \"ad2115a5-1371-4a19-b1e8-7f93a7719a71\" (UID: \"ad2115a5-1371-4a19-b1e8-7f93a7719a71\") " Dec 10 13:07:50 crc kubenswrapper[4921]: I1210 13:07:50.364305 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ad2115a5-1371-4a19-b1e8-7f93a7719a71-trusted-ca-bundle\") pod \"ad2115a5-1371-4a19-b1e8-7f93a7719a71\" (UID: \"ad2115a5-1371-4a19-b1e8-7f93a7719a71\") " Dec 10 13:07:50 crc kubenswrapper[4921]: I1210 13:07:50.364366 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/ad2115a5-1371-4a19-b1e8-7f93a7719a71-service-ca\") pod \"ad2115a5-1371-4a19-b1e8-7f93a7719a71\" (UID: \"ad2115a5-1371-4a19-b1e8-7f93a7719a71\") " Dec 10 13:07:50 crc kubenswrapper[4921]: I1210 13:07:50.364418 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/ad2115a5-1371-4a19-b1e8-7f93a7719a71-console-oauth-config\") pod \"ad2115a5-1371-4a19-b1e8-7f93a7719a71\" (UID: \"ad2115a5-1371-4a19-b1e8-7f93a7719a71\") " Dec 10 13:07:50 crc kubenswrapper[4921]: I1210 13:07:50.365557 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ad2115a5-1371-4a19-b1e8-7f93a7719a71-console-config" (OuterVolumeSpecName: "console-config") pod "ad2115a5-1371-4a19-b1e8-7f93a7719a71" (UID: "ad2115a5-1371-4a19-b1e8-7f93a7719a71"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 13:07:50 crc kubenswrapper[4921]: I1210 13:07:50.365594 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ad2115a5-1371-4a19-b1e8-7f93a7719a71-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "ad2115a5-1371-4a19-b1e8-7f93a7719a71" (UID: "ad2115a5-1371-4a19-b1e8-7f93a7719a71"). InnerVolumeSpecName "oauth-serving-cert". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 13:07:50 crc kubenswrapper[4921]: I1210 13:07:50.366070 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ad2115a5-1371-4a19-b1e8-7f93a7719a71-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "ad2115a5-1371-4a19-b1e8-7f93a7719a71" (UID: "ad2115a5-1371-4a19-b1e8-7f93a7719a71"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 13:07:50 crc kubenswrapper[4921]: I1210 13:07:50.366493 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ad2115a5-1371-4a19-b1e8-7f93a7719a71-service-ca" (OuterVolumeSpecName: "service-ca") pod "ad2115a5-1371-4a19-b1e8-7f93a7719a71" (UID: "ad2115a5-1371-4a19-b1e8-7f93a7719a71"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 13:07:50 crc kubenswrapper[4921]: I1210 13:07:50.371908 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ad2115a5-1371-4a19-b1e8-7f93a7719a71-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "ad2115a5-1371-4a19-b1e8-7f93a7719a71" (UID: "ad2115a5-1371-4a19-b1e8-7f93a7719a71"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:07:50 crc kubenswrapper[4921]: I1210 13:07:50.372157 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ad2115a5-1371-4a19-b1e8-7f93a7719a71-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "ad2115a5-1371-4a19-b1e8-7f93a7719a71" (UID: "ad2115a5-1371-4a19-b1e8-7f93a7719a71"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:07:50 crc kubenswrapper[4921]: I1210 13:07:50.375147 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ad2115a5-1371-4a19-b1e8-7f93a7719a71-kube-api-access-ks8fc" (OuterVolumeSpecName: "kube-api-access-ks8fc") pod "ad2115a5-1371-4a19-b1e8-7f93a7719a71" (UID: "ad2115a5-1371-4a19-b1e8-7f93a7719a71"). InnerVolumeSpecName "kube-api-access-ks8fc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 13:07:50 crc kubenswrapper[4921]: I1210 13:07:50.466496 4921 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ad2115a5-1371-4a19-b1e8-7f93a7719a71-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 13:07:50 crc kubenswrapper[4921]: I1210 13:07:50.466694 4921 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/ad2115a5-1371-4a19-b1e8-7f93a7719a71-service-ca\") on node \"crc\" DevicePath \"\"" Dec 10 13:07:50 crc kubenswrapper[4921]: I1210 13:07:50.466715 4921 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/ad2115a5-1371-4a19-b1e8-7f93a7719a71-console-oauth-config\") on node \"crc\" DevicePath \"\"" Dec 10 13:07:50 crc kubenswrapper[4921]: I1210 13:07:50.466724 4921 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/ad2115a5-1371-4a19-b1e8-7f93a7719a71-console-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 10 13:07:50 crc kubenswrapper[4921]: I1210 13:07:50.466732 4921 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/ad2115a5-1371-4a19-b1e8-7f93a7719a71-console-config\") on node \"crc\" DevicePath \"\"" Dec 10 13:07:50 crc kubenswrapper[4921]: I1210 13:07:50.466740 4921 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/ad2115a5-1371-4a19-b1e8-7f93a7719a71-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 10 13:07:50 crc kubenswrapper[4921]: I1210 13:07:50.466749 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ks8fc\" (UniqueName: \"kubernetes.io/projected/ad2115a5-1371-4a19-b1e8-7f93a7719a71-kube-api-access-ks8fc\") on node \"crc\" DevicePath \"\"" Dec 10 13:07:50 crc kubenswrapper[4921]: I1210 13:07:50.504994 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83psbdz"] Dec 10 13:07:50 crc kubenswrapper[4921]: I1210 13:07:50.866445 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-8pb7c_ad2115a5-1371-4a19-b1e8-7f93a7719a71/console/0.log" Dec 10 13:07:50 crc kubenswrapper[4921]: I1210 13:07:50.866489 4921 generic.go:334] "Generic (PLEG): container finished" podID="ad2115a5-1371-4a19-b1e8-7f93a7719a71" containerID="5385b19f4a74f6dbfbfc105a753913007b044a27376d986811760a2ff87984b7" exitCode=2 Dec 10 13:07:50 crc kubenswrapper[4921]: I1210 13:07:50.866523 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-8pb7c" event={"ID":"ad2115a5-1371-4a19-b1e8-7f93a7719a71","Type":"ContainerDied","Data":"5385b19f4a74f6dbfbfc105a753913007b044a27376d986811760a2ff87984b7"} Dec 10 13:07:50 crc kubenswrapper[4921]: I1210 13:07:50.866574 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-8pb7c" event={"ID":"ad2115a5-1371-4a19-b1e8-7f93a7719a71","Type":"ContainerDied","Data":"99d3ce77f0b6878db425aa4b744ba0ad1c20c07ff8e7c42826edf00826dea06c"} Dec 10 13:07:50 crc kubenswrapper[4921]: I1210 13:07:50.866583 4921 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-8pb7c" Dec 10 13:07:50 crc kubenswrapper[4921]: I1210 13:07:50.866596 4921 scope.go:117] "RemoveContainer" containerID="5385b19f4a74f6dbfbfc105a753913007b044a27376d986811760a2ff87984b7" Dec 10 13:07:50 crc kubenswrapper[4921]: I1210 13:07:50.868624 4921 generic.go:334] "Generic (PLEG): container finished" podID="1d3566b0-e028-4148-a7be-04cf455a999c" containerID="0ae7b760bd55a68852a81e337c7a7ebdc9350da94249db15a2b465353850a0bd" exitCode=0 Dec 10 13:07:50 crc kubenswrapper[4921]: I1210 13:07:50.868677 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83psbdz" event={"ID":"1d3566b0-e028-4148-a7be-04cf455a999c","Type":"ContainerDied","Data":"0ae7b760bd55a68852a81e337c7a7ebdc9350da94249db15a2b465353850a0bd"} Dec 10 13:07:50 crc kubenswrapper[4921]: I1210 13:07:50.868715 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83psbdz" event={"ID":"1d3566b0-e028-4148-a7be-04cf455a999c","Type":"ContainerStarted","Data":"961579a177356a9cf45714bc7b0ef97438e549f6d6c948149b34bf76aa9c3a49"} Dec 10 13:07:50 crc kubenswrapper[4921]: I1210 13:07:50.890250 4921 scope.go:117] "RemoveContainer" containerID="5385b19f4a74f6dbfbfc105a753913007b044a27376d986811760a2ff87984b7" Dec 10 13:07:50 crc kubenswrapper[4921]: E1210 13:07:50.893082 4921 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5385b19f4a74f6dbfbfc105a753913007b044a27376d986811760a2ff87984b7\": container with ID starting with 5385b19f4a74f6dbfbfc105a753913007b044a27376d986811760a2ff87984b7 not found: ID does not exist" containerID="5385b19f4a74f6dbfbfc105a753913007b044a27376d986811760a2ff87984b7" Dec 10 13:07:50 crc kubenswrapper[4921]: I1210 13:07:50.893169 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5385b19f4a74f6dbfbfc105a753913007b044a27376d986811760a2ff87984b7"} err="failed to get container status \"5385b19f4a74f6dbfbfc105a753913007b044a27376d986811760a2ff87984b7\": rpc error: code = NotFound desc = could not find container \"5385b19f4a74f6dbfbfc105a753913007b044a27376d986811760a2ff87984b7\": container with ID starting with 5385b19f4a74f6dbfbfc105a753913007b044a27376d986811760a2ff87984b7 not found: ID does not exist" Dec 10 13:07:50 crc kubenswrapper[4921]: I1210 13:07:50.904805 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-8pb7c"] Dec 10 13:07:50 crc kubenswrapper[4921]: I1210 13:07:50.909587 4921 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-8pb7c"] Dec 10 13:07:51 crc kubenswrapper[4921]: I1210 13:07:51.199279 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ad2115a5-1371-4a19-b1e8-7f93a7719a71" path="/var/lib/kubelet/pods/ad2115a5-1371-4a19-b1e8-7f93a7719a71/volumes" Dec 10 13:07:52 crc kubenswrapper[4921]: I1210 13:07:52.890496 4921 generic.go:334] "Generic (PLEG): container finished" podID="1d3566b0-e028-4148-a7be-04cf455a999c" containerID="d35c06a49219a6b0323a0eb4012e56cbdb9cab6c35fc49e5fe804ba39f92987e" exitCode=0 Dec 10 13:07:52 crc kubenswrapper[4921]: I1210 13:07:52.890833 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83psbdz" 
event={"ID":"1d3566b0-e028-4148-a7be-04cf455a999c","Type":"ContainerDied","Data":"d35c06a49219a6b0323a0eb4012e56cbdb9cab6c35fc49e5fe804ba39f92987e"} Dec 10 13:07:53 crc kubenswrapper[4921]: I1210 13:07:53.897725 4921 generic.go:334] "Generic (PLEG): container finished" podID="1d3566b0-e028-4148-a7be-04cf455a999c" containerID="476b212e7edc8e42f2511958175978d8e995de419cca5e235c02cac004313156" exitCode=0 Dec 10 13:07:53 crc kubenswrapper[4921]: I1210 13:07:53.897769 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83psbdz" event={"ID":"1d3566b0-e028-4148-a7be-04cf455a999c","Type":"ContainerDied","Data":"476b212e7edc8e42f2511958175978d8e995de419cca5e235c02cac004313156"} Dec 10 13:07:55 crc kubenswrapper[4921]: I1210 13:07:55.104180 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83psbdz" Dec 10 13:07:55 crc kubenswrapper[4921]: I1210 13:07:55.228225 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/1d3566b0-e028-4148-a7be-04cf455a999c-util\") pod \"1d3566b0-e028-4148-a7be-04cf455a999c\" (UID: \"1d3566b0-e028-4148-a7be-04cf455a999c\") " Dec 10 13:07:55 crc kubenswrapper[4921]: I1210 13:07:55.228304 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tn4lh\" (UniqueName: \"kubernetes.io/projected/1d3566b0-e028-4148-a7be-04cf455a999c-kube-api-access-tn4lh\") pod \"1d3566b0-e028-4148-a7be-04cf455a999c\" (UID: \"1d3566b0-e028-4148-a7be-04cf455a999c\") " Dec 10 13:07:55 crc kubenswrapper[4921]: I1210 13:07:55.228451 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/1d3566b0-e028-4148-a7be-04cf455a999c-bundle\") pod \"1d3566b0-e028-4148-a7be-04cf455a999c\" (UID: \"1d3566b0-e028-4148-a7be-04cf455a999c\") " Dec 10 13:07:55 crc kubenswrapper[4921]: I1210 13:07:55.230090 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d3566b0-e028-4148-a7be-04cf455a999c-bundle" (OuterVolumeSpecName: "bundle") pod "1d3566b0-e028-4148-a7be-04cf455a999c" (UID: "1d3566b0-e028-4148-a7be-04cf455a999c"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 13:07:55 crc kubenswrapper[4921]: I1210 13:07:55.240295 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d3566b0-e028-4148-a7be-04cf455a999c-kube-api-access-tn4lh" (OuterVolumeSpecName: "kube-api-access-tn4lh") pod "1d3566b0-e028-4148-a7be-04cf455a999c" (UID: "1d3566b0-e028-4148-a7be-04cf455a999c"). InnerVolumeSpecName "kube-api-access-tn4lh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 13:07:55 crc kubenswrapper[4921]: I1210 13:07:55.250517 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d3566b0-e028-4148-a7be-04cf455a999c-util" (OuterVolumeSpecName: "util") pod "1d3566b0-e028-4148-a7be-04cf455a999c" (UID: "1d3566b0-e028-4148-a7be-04cf455a999c"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 13:07:55 crc kubenswrapper[4921]: I1210 13:07:55.329685 4921 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/1d3566b0-e028-4148-a7be-04cf455a999c-util\") on node \"crc\" DevicePath \"\"" Dec 10 13:07:55 crc kubenswrapper[4921]: I1210 13:07:55.329716 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tn4lh\" (UniqueName: \"kubernetes.io/projected/1d3566b0-e028-4148-a7be-04cf455a999c-kube-api-access-tn4lh\") on node \"crc\" DevicePath \"\"" Dec 10 13:07:55 crc kubenswrapper[4921]: I1210 13:07:55.329726 4921 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/1d3566b0-e028-4148-a7be-04cf455a999c-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 13:07:55 crc kubenswrapper[4921]: I1210 13:07:55.910677 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83psbdz" event={"ID":"1d3566b0-e028-4148-a7be-04cf455a999c","Type":"ContainerDied","Data":"961579a177356a9cf45714bc7b0ef97438e549f6d6c948149b34bf76aa9c3a49"} Dec 10 13:07:55 crc kubenswrapper[4921]: I1210 13:07:55.910717 4921 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="961579a177356a9cf45714bc7b0ef97438e549f6d6c948149b34bf76aa9c3a49" Dec 10 13:07:55 crc kubenswrapper[4921]: I1210 13:07:55.910746 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83psbdz" Dec 10 13:08:04 crc kubenswrapper[4921]: I1210 13:08:04.517972 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-dcc9644d6-vngw7"] Dec 10 13:08:04 crc kubenswrapper[4921]: E1210 13:08:04.519560 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad2115a5-1371-4a19-b1e8-7f93a7719a71" containerName="console" Dec 10 13:08:04 crc kubenswrapper[4921]: I1210 13:08:04.519650 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad2115a5-1371-4a19-b1e8-7f93a7719a71" containerName="console" Dec 10 13:08:04 crc kubenswrapper[4921]: E1210 13:08:04.519711 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d3566b0-e028-4148-a7be-04cf455a999c" containerName="pull" Dec 10 13:08:04 crc kubenswrapper[4921]: I1210 13:08:04.519759 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d3566b0-e028-4148-a7be-04cf455a999c" containerName="pull" Dec 10 13:08:04 crc kubenswrapper[4921]: E1210 13:08:04.519815 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d3566b0-e028-4148-a7be-04cf455a999c" containerName="util" Dec 10 13:08:04 crc kubenswrapper[4921]: I1210 13:08:04.519867 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d3566b0-e028-4148-a7be-04cf455a999c" containerName="util" Dec 10 13:08:04 crc kubenswrapper[4921]: E1210 13:08:04.519921 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d3566b0-e028-4148-a7be-04cf455a999c" containerName="extract" Dec 10 13:08:04 crc kubenswrapper[4921]: I1210 13:08:04.519968 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d3566b0-e028-4148-a7be-04cf455a999c" containerName="extract" Dec 10 13:08:04 crc kubenswrapper[4921]: I1210 13:08:04.520100 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="ad2115a5-1371-4a19-b1e8-7f93a7719a71" containerName="console" Dec 10 
13:08:04 crc kubenswrapper[4921]: I1210 13:08:04.520154 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="1d3566b0-e028-4148-a7be-04cf455a999c" containerName="extract" Dec 10 13:08:04 crc kubenswrapper[4921]: I1210 13:08:04.520575 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-dcc9644d6-vngw7" Dec 10 13:08:04 crc kubenswrapper[4921]: I1210 13:08:04.523267 4921 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert" Dec 10 13:08:04 crc kubenswrapper[4921]: I1210 13:08:04.523690 4921 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Dec 10 13:08:04 crc kubenswrapper[4921]: I1210 13:08:04.523867 4921 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-dmm57" Dec 10 13:08:04 crc kubenswrapper[4921]: I1210 13:08:04.524335 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Dec 10 13:08:04 crc kubenswrapper[4921]: I1210 13:08:04.538788 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt" Dec 10 13:08:04 crc kubenswrapper[4921]: I1210 13:08:04.544733 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-dcc9644d6-vngw7"] Dec 10 13:08:04 crc kubenswrapper[4921]: I1210 13:08:04.639782 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/6344fb01-da13-44f0-997e-8995611d7d49-webhook-cert\") pod \"metallb-operator-controller-manager-dcc9644d6-vngw7\" (UID: \"6344fb01-da13-44f0-997e-8995611d7d49\") " pod="metallb-system/metallb-operator-controller-manager-dcc9644d6-vngw7" Dec 10 13:08:04 crc kubenswrapper[4921]: I1210 13:08:04.640021 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x5jlv\" (UniqueName: \"kubernetes.io/projected/6344fb01-da13-44f0-997e-8995611d7d49-kube-api-access-x5jlv\") pod \"metallb-operator-controller-manager-dcc9644d6-vngw7\" (UID: \"6344fb01-da13-44f0-997e-8995611d7d49\") " pod="metallb-system/metallb-operator-controller-manager-dcc9644d6-vngw7" Dec 10 13:08:04 crc kubenswrapper[4921]: I1210 13:08:04.640136 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/6344fb01-da13-44f0-997e-8995611d7d49-apiservice-cert\") pod \"metallb-operator-controller-manager-dcc9644d6-vngw7\" (UID: \"6344fb01-da13-44f0-997e-8995611d7d49\") " pod="metallb-system/metallb-operator-controller-manager-dcc9644d6-vngw7" Dec 10 13:08:04 crc kubenswrapper[4921]: I1210 13:08:04.741208 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/6344fb01-da13-44f0-997e-8995611d7d49-webhook-cert\") pod \"metallb-operator-controller-manager-dcc9644d6-vngw7\" (UID: \"6344fb01-da13-44f0-997e-8995611d7d49\") " pod="metallb-system/metallb-operator-controller-manager-dcc9644d6-vngw7" Dec 10 13:08:04 crc kubenswrapper[4921]: I1210 13:08:04.742368 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x5jlv\" (UniqueName: 
\"kubernetes.io/projected/6344fb01-da13-44f0-997e-8995611d7d49-kube-api-access-x5jlv\") pod \"metallb-operator-controller-manager-dcc9644d6-vngw7\" (UID: \"6344fb01-da13-44f0-997e-8995611d7d49\") " pod="metallb-system/metallb-operator-controller-manager-dcc9644d6-vngw7" Dec 10 13:08:04 crc kubenswrapper[4921]: I1210 13:08:04.742738 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/6344fb01-da13-44f0-997e-8995611d7d49-apiservice-cert\") pod \"metallb-operator-controller-manager-dcc9644d6-vngw7\" (UID: \"6344fb01-da13-44f0-997e-8995611d7d49\") " pod="metallb-system/metallb-operator-controller-manager-dcc9644d6-vngw7" Dec 10 13:08:04 crc kubenswrapper[4921]: I1210 13:08:04.747455 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/6344fb01-da13-44f0-997e-8995611d7d49-apiservice-cert\") pod \"metallb-operator-controller-manager-dcc9644d6-vngw7\" (UID: \"6344fb01-da13-44f0-997e-8995611d7d49\") " pod="metallb-system/metallb-operator-controller-manager-dcc9644d6-vngw7" Dec 10 13:08:04 crc kubenswrapper[4921]: I1210 13:08:04.760223 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-55b8dfdccf-g755g"] Dec 10 13:08:04 crc kubenswrapper[4921]: I1210 13:08:04.761032 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-55b8dfdccf-g755g" Dec 10 13:08:04 crc kubenswrapper[4921]: I1210 13:08:04.762512 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/6344fb01-da13-44f0-997e-8995611d7d49-webhook-cert\") pod \"metallb-operator-controller-manager-dcc9644d6-vngw7\" (UID: \"6344fb01-da13-44f0-997e-8995611d7d49\") " pod="metallb-system/metallb-operator-controller-manager-dcc9644d6-vngw7" Dec 10 13:08:04 crc kubenswrapper[4921]: I1210 13:08:04.765791 4921 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Dec 10 13:08:04 crc kubenswrapper[4921]: I1210 13:08:04.766027 4921 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Dec 10 13:08:04 crc kubenswrapper[4921]: I1210 13:08:04.766213 4921 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-cm24v" Dec 10 13:08:04 crc kubenswrapper[4921]: I1210 13:08:04.775155 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x5jlv\" (UniqueName: \"kubernetes.io/projected/6344fb01-da13-44f0-997e-8995611d7d49-kube-api-access-x5jlv\") pod \"metallb-operator-controller-manager-dcc9644d6-vngw7\" (UID: \"6344fb01-da13-44f0-997e-8995611d7d49\") " pod="metallb-system/metallb-operator-controller-manager-dcc9644d6-vngw7" Dec 10 13:08:04 crc kubenswrapper[4921]: I1210 13:08:04.794330 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-55b8dfdccf-g755g"] Dec 10 13:08:04 crc kubenswrapper[4921]: I1210 13:08:04.838908 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-dcc9644d6-vngw7" Dec 10 13:08:04 crc kubenswrapper[4921]: I1210 13:08:04.844317 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/be00220d-bc15-4e8f-95be-44043850aac1-webhook-cert\") pod \"metallb-operator-webhook-server-55b8dfdccf-g755g\" (UID: \"be00220d-bc15-4e8f-95be-44043850aac1\") " pod="metallb-system/metallb-operator-webhook-server-55b8dfdccf-g755g" Dec 10 13:08:04 crc kubenswrapper[4921]: I1210 13:08:04.844347 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/be00220d-bc15-4e8f-95be-44043850aac1-apiservice-cert\") pod \"metallb-operator-webhook-server-55b8dfdccf-g755g\" (UID: \"be00220d-bc15-4e8f-95be-44043850aac1\") " pod="metallb-system/metallb-operator-webhook-server-55b8dfdccf-g755g" Dec 10 13:08:04 crc kubenswrapper[4921]: I1210 13:08:04.844427 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jbc7n\" (UniqueName: \"kubernetes.io/projected/be00220d-bc15-4e8f-95be-44043850aac1-kube-api-access-jbc7n\") pod \"metallb-operator-webhook-server-55b8dfdccf-g755g\" (UID: \"be00220d-bc15-4e8f-95be-44043850aac1\") " pod="metallb-system/metallb-operator-webhook-server-55b8dfdccf-g755g" Dec 10 13:08:04 crc kubenswrapper[4921]: I1210 13:08:04.953131 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jbc7n\" (UniqueName: \"kubernetes.io/projected/be00220d-bc15-4e8f-95be-44043850aac1-kube-api-access-jbc7n\") pod \"metallb-operator-webhook-server-55b8dfdccf-g755g\" (UID: \"be00220d-bc15-4e8f-95be-44043850aac1\") " pod="metallb-system/metallb-operator-webhook-server-55b8dfdccf-g755g" Dec 10 13:08:04 crc kubenswrapper[4921]: I1210 13:08:04.953453 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/be00220d-bc15-4e8f-95be-44043850aac1-webhook-cert\") pod \"metallb-operator-webhook-server-55b8dfdccf-g755g\" (UID: \"be00220d-bc15-4e8f-95be-44043850aac1\") " pod="metallb-system/metallb-operator-webhook-server-55b8dfdccf-g755g" Dec 10 13:08:04 crc kubenswrapper[4921]: I1210 13:08:04.953754 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/be00220d-bc15-4e8f-95be-44043850aac1-apiservice-cert\") pod \"metallb-operator-webhook-server-55b8dfdccf-g755g\" (UID: \"be00220d-bc15-4e8f-95be-44043850aac1\") " pod="metallb-system/metallb-operator-webhook-server-55b8dfdccf-g755g" Dec 10 13:08:04 crc kubenswrapper[4921]: I1210 13:08:04.957947 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/be00220d-bc15-4e8f-95be-44043850aac1-webhook-cert\") pod \"metallb-operator-webhook-server-55b8dfdccf-g755g\" (UID: \"be00220d-bc15-4e8f-95be-44043850aac1\") " pod="metallb-system/metallb-operator-webhook-server-55b8dfdccf-g755g" Dec 10 13:08:04 crc kubenswrapper[4921]: I1210 13:08:04.993997 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/be00220d-bc15-4e8f-95be-44043850aac1-apiservice-cert\") pod \"metallb-operator-webhook-server-55b8dfdccf-g755g\" (UID: \"be00220d-bc15-4e8f-95be-44043850aac1\") " 
pod="metallb-system/metallb-operator-webhook-server-55b8dfdccf-g755g" Dec 10 13:08:05 crc kubenswrapper[4921]: I1210 13:08:05.004299 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jbc7n\" (UniqueName: \"kubernetes.io/projected/be00220d-bc15-4e8f-95be-44043850aac1-kube-api-access-jbc7n\") pod \"metallb-operator-webhook-server-55b8dfdccf-g755g\" (UID: \"be00220d-bc15-4e8f-95be-44043850aac1\") " pod="metallb-system/metallb-operator-webhook-server-55b8dfdccf-g755g" Dec 10 13:08:05 crc kubenswrapper[4921]: I1210 13:08:05.121324 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-55b8dfdccf-g755g" Dec 10 13:08:05 crc kubenswrapper[4921]: I1210 13:08:05.161468 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-dcc9644d6-vngw7"] Dec 10 13:08:05 crc kubenswrapper[4921]: I1210 13:08:05.394496 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-55b8dfdccf-g755g"] Dec 10 13:08:05 crc kubenswrapper[4921]: W1210 13:08:05.402589 4921 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbe00220d_bc15_4e8f_95be_44043850aac1.slice/crio-df3838a66a1866941aa2bd826e174d71a64a69e3ae17411203bf9994eaf76ff1 WatchSource:0}: Error finding container df3838a66a1866941aa2bd826e174d71a64a69e3ae17411203bf9994eaf76ff1: Status 404 returned error can't find the container with id df3838a66a1866941aa2bd826e174d71a64a69e3ae17411203bf9994eaf76ff1 Dec 10 13:08:05 crc kubenswrapper[4921]: I1210 13:08:05.980205 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-55b8dfdccf-g755g" event={"ID":"be00220d-bc15-4e8f-95be-44043850aac1","Type":"ContainerStarted","Data":"df3838a66a1866941aa2bd826e174d71a64a69e3ae17411203bf9994eaf76ff1"} Dec 10 13:08:05 crc kubenswrapper[4921]: I1210 13:08:05.982269 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-dcc9644d6-vngw7" event={"ID":"6344fb01-da13-44f0-997e-8995611d7d49","Type":"ContainerStarted","Data":"6c745401ccd557310932212cc79b34e2ca8e31c806913dd109ecb11142899ca7"} Dec 10 13:08:12 crc kubenswrapper[4921]: I1210 13:08:12.021056 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-55b8dfdccf-g755g" event={"ID":"be00220d-bc15-4e8f-95be-44043850aac1","Type":"ContainerStarted","Data":"a191a2ab42eef4610dbc138660419a766e8d6cde02e22b22330ed8e9d18aff64"} Dec 10 13:08:12 crc kubenswrapper[4921]: I1210 13:08:12.021692 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-55b8dfdccf-g755g" Dec 10 13:08:12 crc kubenswrapper[4921]: I1210 13:08:12.022424 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-dcc9644d6-vngw7" event={"ID":"6344fb01-da13-44f0-997e-8995611d7d49","Type":"ContainerStarted","Data":"77ae745e848c363bc1e405d588ab32d1b47f4c1b7bd72f14f2aca3ec4a4fd611"} Dec 10 13:08:12 crc kubenswrapper[4921]: I1210 13:08:12.022797 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-dcc9644d6-vngw7" Dec 10 13:08:12 crc kubenswrapper[4921]: I1210 13:08:12.049825 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="metallb-system/metallb-operator-webhook-server-55b8dfdccf-g755g" podStartSLOduration=2.296543661 podStartE2EDuration="8.04980223s" podCreationTimestamp="2025-12-10 13:08:04 +0000 UTC" firstStartedPulling="2025-12-10 13:08:05.407457127 +0000 UTC m=+682.623679061" lastFinishedPulling="2025-12-10 13:08:11.160715706 +0000 UTC m=+688.376937630" observedRunningTime="2025-12-10 13:08:12.045115846 +0000 UTC m=+689.261337790" watchObservedRunningTime="2025-12-10 13:08:12.04980223 +0000 UTC m=+689.266024154" Dec 10 13:08:12 crc kubenswrapper[4921]: I1210 13:08:12.086594 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-dcc9644d6-vngw7" podStartSLOduration=2.104814957 podStartE2EDuration="8.086571077s" podCreationTimestamp="2025-12-10 13:08:04 +0000 UTC" firstStartedPulling="2025-12-10 13:08:05.172606717 +0000 UTC m=+682.388828641" lastFinishedPulling="2025-12-10 13:08:11.154362837 +0000 UTC m=+688.370584761" observedRunningTime="2025-12-10 13:08:12.080023193 +0000 UTC m=+689.296245137" watchObservedRunningTime="2025-12-10 13:08:12.086571077 +0000 UTC m=+689.302793021" Dec 10 13:08:16 crc kubenswrapper[4921]: I1210 13:08:16.711252 4921 patch_prober.go:28] interesting pod/machine-config-daemon-vn2n6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 10 13:08:16 crc kubenswrapper[4921]: I1210 13:08:16.711646 4921 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" podUID="354355f7-6630-49a8-bdc5-5e875feecb7f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 10 13:08:25 crc kubenswrapper[4921]: I1210 13:08:25.136286 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-55b8dfdccf-g755g" Dec 10 13:08:27 crc kubenswrapper[4921]: I1210 13:08:27.883068 4921 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Dec 10 13:08:44 crc kubenswrapper[4921]: I1210 13:08:44.842610 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-dcc9644d6-vngw7" Dec 10 13:08:45 crc kubenswrapper[4921]: I1210 13:08:45.571184 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-gbfb5"] Dec 10 13:08:45 crc kubenswrapper[4921]: I1210 13:08:45.573474 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-gbfb5" Dec 10 13:08:45 crc kubenswrapper[4921]: I1210 13:08:45.576226 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Dec 10 13:08:45 crc kubenswrapper[4921]: I1210 13:08:45.576343 4921 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-wjwlv" Dec 10 13:08:45 crc kubenswrapper[4921]: I1210 13:08:45.576381 4921 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Dec 10 13:08:45 crc kubenswrapper[4921]: I1210 13:08:45.579788 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-kpktr"] Dec 10 13:08:45 crc kubenswrapper[4921]: I1210 13:08:45.580489 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-kpktr" Dec 10 13:08:45 crc kubenswrapper[4921]: I1210 13:08:45.583165 4921 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Dec 10 13:08:45 crc kubenswrapper[4921]: I1210 13:08:45.598833 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-kpktr"] Dec 10 13:08:45 crc kubenswrapper[4921]: I1210 13:08:45.700211 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-nstdr"] Dec 10 13:08:45 crc kubenswrapper[4921]: I1210 13:08:45.701280 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-nstdr" Dec 10 13:08:45 crc kubenswrapper[4921]: I1210 13:08:45.703998 4921 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist" Dec 10 13:08:45 crc kubenswrapper[4921]: I1210 13:08:45.704732 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2" Dec 10 13:08:45 crc kubenswrapper[4921]: I1210 13:08:45.705658 4921 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-4djrb" Dec 10 13:08:45 crc kubenswrapper[4921]: I1210 13:08:45.705942 4921 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret" Dec 10 13:08:45 crc kubenswrapper[4921]: I1210 13:08:45.728021 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-f8648f98b-spclt"] Dec 10 13:08:45 crc kubenswrapper[4921]: I1210 13:08:45.729090 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-f8648f98b-spclt" Dec 10 13:08:45 crc kubenswrapper[4921]: I1210 13:08:45.733454 4921 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret" Dec 10 13:08:45 crc kubenswrapper[4921]: I1210 13:08:45.746882 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-f8648f98b-spclt"] Dec 10 13:08:45 crc kubenswrapper[4921]: I1210 13:08:45.758261 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/aefe835b-88a0-4bfc-9358-601b13418414-reloader\") pod \"frr-k8s-gbfb5\" (UID: \"aefe835b-88a0-4bfc-9358-601b13418414\") " pod="metallb-system/frr-k8s-gbfb5" Dec 10 13:08:45 crc kubenswrapper[4921]: I1210 13:08:45.758312 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/aefe835b-88a0-4bfc-9358-601b13418414-frr-startup\") pod \"frr-k8s-gbfb5\" (UID: \"aefe835b-88a0-4bfc-9358-601b13418414\") " pod="metallb-system/frr-k8s-gbfb5" Dec 10 13:08:45 crc kubenswrapper[4921]: I1210 13:08:45.758352 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/aefe835b-88a0-4bfc-9358-601b13418414-metrics\") pod \"frr-k8s-gbfb5\" (UID: \"aefe835b-88a0-4bfc-9358-601b13418414\") " pod="metallb-system/frr-k8s-gbfb5" Dec 10 13:08:45 crc kubenswrapper[4921]: I1210 13:08:45.758375 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/9a1c6903-fed0-48be-b83c-ec8416ee2204-metallb-excludel2\") pod \"speaker-nstdr\" (UID: \"9a1c6903-fed0-48be-b83c-ec8416ee2204\") " pod="metallb-system/speaker-nstdr" Dec 10 13:08:45 crc kubenswrapper[4921]: I1210 13:08:45.758435 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/182ccfa3-c32b-4dca-9876-770f89d8eda5-cert\") pod \"controller-f8648f98b-spclt\" (UID: \"182ccfa3-c32b-4dca-9876-770f89d8eda5\") " pod="metallb-system/controller-f8648f98b-spclt" Dec 10 13:08:45 crc kubenswrapper[4921]: I1210 13:08:45.758520 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/7421a4d2-6416-4ad9-b177-6644f804b950-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-kpktr\" (UID: \"7421a4d2-6416-4ad9-b177-6644f804b950\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-kpktr" Dec 10 13:08:45 crc kubenswrapper[4921]: I1210 13:08:45.758573 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9a1c6903-fed0-48be-b83c-ec8416ee2204-metrics-certs\") pod \"speaker-nstdr\" (UID: \"9a1c6903-fed0-48be-b83c-ec8416ee2204\") " pod="metallb-system/speaker-nstdr" Dec 10 13:08:45 crc kubenswrapper[4921]: I1210 13:08:45.758672 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/9a1c6903-fed0-48be-b83c-ec8416ee2204-memberlist\") pod \"speaker-nstdr\" (UID: \"9a1c6903-fed0-48be-b83c-ec8416ee2204\") " pod="metallb-system/speaker-nstdr" Dec 10 13:08:45 crc kubenswrapper[4921]: I1210 13:08:45.758740 4921 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/aefe835b-88a0-4bfc-9358-601b13418414-frr-sockets\") pod \"frr-k8s-gbfb5\" (UID: \"aefe835b-88a0-4bfc-9358-601b13418414\") " pod="metallb-system/frr-k8s-gbfb5" Dec 10 13:08:45 crc kubenswrapper[4921]: I1210 13:08:45.758769 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/182ccfa3-c32b-4dca-9876-770f89d8eda5-metrics-certs\") pod \"controller-f8648f98b-spclt\" (UID: \"182ccfa3-c32b-4dca-9876-770f89d8eda5\") " pod="metallb-system/controller-f8648f98b-spclt" Dec 10 13:08:45 crc kubenswrapper[4921]: I1210 13:08:45.758806 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zhl2r\" (UniqueName: \"kubernetes.io/projected/7421a4d2-6416-4ad9-b177-6644f804b950-kube-api-access-zhl2r\") pod \"frr-k8s-webhook-server-7fcb986d4-kpktr\" (UID: \"7421a4d2-6416-4ad9-b177-6644f804b950\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-kpktr" Dec 10 13:08:45 crc kubenswrapper[4921]: I1210 13:08:45.758831 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gnqms\" (UniqueName: \"kubernetes.io/projected/9a1c6903-fed0-48be-b83c-ec8416ee2204-kube-api-access-gnqms\") pod \"speaker-nstdr\" (UID: \"9a1c6903-fed0-48be-b83c-ec8416ee2204\") " pod="metallb-system/speaker-nstdr" Dec 10 13:08:45 crc kubenswrapper[4921]: I1210 13:08:45.758861 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q69gk\" (UniqueName: \"kubernetes.io/projected/aefe835b-88a0-4bfc-9358-601b13418414-kube-api-access-q69gk\") pod \"frr-k8s-gbfb5\" (UID: \"aefe835b-88a0-4bfc-9358-601b13418414\") " pod="metallb-system/frr-k8s-gbfb5" Dec 10 13:08:45 crc kubenswrapper[4921]: I1210 13:08:45.758879 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v59hf\" (UniqueName: \"kubernetes.io/projected/182ccfa3-c32b-4dca-9876-770f89d8eda5-kube-api-access-v59hf\") pod \"controller-f8648f98b-spclt\" (UID: \"182ccfa3-c32b-4dca-9876-770f89d8eda5\") " pod="metallb-system/controller-f8648f98b-spclt" Dec 10 13:08:45 crc kubenswrapper[4921]: I1210 13:08:45.758939 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/aefe835b-88a0-4bfc-9358-601b13418414-frr-conf\") pod \"frr-k8s-gbfb5\" (UID: \"aefe835b-88a0-4bfc-9358-601b13418414\") " pod="metallb-system/frr-k8s-gbfb5" Dec 10 13:08:45 crc kubenswrapper[4921]: I1210 13:08:45.758974 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/aefe835b-88a0-4bfc-9358-601b13418414-metrics-certs\") pod \"frr-k8s-gbfb5\" (UID: \"aefe835b-88a0-4bfc-9358-601b13418414\") " pod="metallb-system/frr-k8s-gbfb5" Dec 10 13:08:45 crc kubenswrapper[4921]: I1210 13:08:45.859360 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/9a1c6903-fed0-48be-b83c-ec8416ee2204-memberlist\") pod \"speaker-nstdr\" (UID: \"9a1c6903-fed0-48be-b83c-ec8416ee2204\") " pod="metallb-system/speaker-nstdr" Dec 10 13:08:45 crc kubenswrapper[4921]: I1210 13:08:45.859418 4921 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/aefe835b-88a0-4bfc-9358-601b13418414-frr-sockets\") pod \"frr-k8s-gbfb5\" (UID: \"aefe835b-88a0-4bfc-9358-601b13418414\") " pod="metallb-system/frr-k8s-gbfb5" Dec 10 13:08:45 crc kubenswrapper[4921]: I1210 13:08:45.859438 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/182ccfa3-c32b-4dca-9876-770f89d8eda5-metrics-certs\") pod \"controller-f8648f98b-spclt\" (UID: \"182ccfa3-c32b-4dca-9876-770f89d8eda5\") " pod="metallb-system/controller-f8648f98b-spclt" Dec 10 13:08:45 crc kubenswrapper[4921]: I1210 13:08:45.859462 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zhl2r\" (UniqueName: \"kubernetes.io/projected/7421a4d2-6416-4ad9-b177-6644f804b950-kube-api-access-zhl2r\") pod \"frr-k8s-webhook-server-7fcb986d4-kpktr\" (UID: \"7421a4d2-6416-4ad9-b177-6644f804b950\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-kpktr" Dec 10 13:08:45 crc kubenswrapper[4921]: I1210 13:08:45.859481 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gnqms\" (UniqueName: \"kubernetes.io/projected/9a1c6903-fed0-48be-b83c-ec8416ee2204-kube-api-access-gnqms\") pod \"speaker-nstdr\" (UID: \"9a1c6903-fed0-48be-b83c-ec8416ee2204\") " pod="metallb-system/speaker-nstdr" Dec 10 13:08:45 crc kubenswrapper[4921]: I1210 13:08:45.859500 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q69gk\" (UniqueName: \"kubernetes.io/projected/aefe835b-88a0-4bfc-9358-601b13418414-kube-api-access-q69gk\") pod \"frr-k8s-gbfb5\" (UID: \"aefe835b-88a0-4bfc-9358-601b13418414\") " pod="metallb-system/frr-k8s-gbfb5" Dec 10 13:08:45 crc kubenswrapper[4921]: I1210 13:08:45.859514 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v59hf\" (UniqueName: \"kubernetes.io/projected/182ccfa3-c32b-4dca-9876-770f89d8eda5-kube-api-access-v59hf\") pod \"controller-f8648f98b-spclt\" (UID: \"182ccfa3-c32b-4dca-9876-770f89d8eda5\") " pod="metallb-system/controller-f8648f98b-spclt" Dec 10 13:08:45 crc kubenswrapper[4921]: I1210 13:08:45.859533 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/aefe835b-88a0-4bfc-9358-601b13418414-frr-conf\") pod \"frr-k8s-gbfb5\" (UID: \"aefe835b-88a0-4bfc-9358-601b13418414\") " pod="metallb-system/frr-k8s-gbfb5" Dec 10 13:08:45 crc kubenswrapper[4921]: E1210 13:08:45.859537 4921 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Dec 10 13:08:45 crc kubenswrapper[4921]: E1210 13:08:45.859616 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9a1c6903-fed0-48be-b83c-ec8416ee2204-memberlist podName:9a1c6903-fed0-48be-b83c-ec8416ee2204 nodeName:}" failed. No retries permitted until 2025-12-10 13:08:46.359598477 +0000 UTC m=+723.575820401 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/9a1c6903-fed0-48be-b83c-ec8416ee2204-memberlist") pod "speaker-nstdr" (UID: "9a1c6903-fed0-48be-b83c-ec8416ee2204") : secret "metallb-memberlist" not found
Dec 10 13:08:45 crc kubenswrapper[4921]: I1210 13:08:45.859550 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/aefe835b-88a0-4bfc-9358-601b13418414-metrics-certs\") pod \"frr-k8s-gbfb5\" (UID: \"aefe835b-88a0-4bfc-9358-601b13418414\") " pod="metallb-system/frr-k8s-gbfb5"
Dec 10 13:08:45 crc kubenswrapper[4921]: I1210 13:08:45.859714 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/aefe835b-88a0-4bfc-9358-601b13418414-reloader\") pod \"frr-k8s-gbfb5\" (UID: \"aefe835b-88a0-4bfc-9358-601b13418414\") " pod="metallb-system/frr-k8s-gbfb5"
Dec 10 13:08:45 crc kubenswrapper[4921]: I1210 13:08:45.859737 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/aefe835b-88a0-4bfc-9358-601b13418414-frr-startup\") pod \"frr-k8s-gbfb5\" (UID: \"aefe835b-88a0-4bfc-9358-601b13418414\") " pod="metallb-system/frr-k8s-gbfb5"
Dec 10 13:08:45 crc kubenswrapper[4921]: I1210 13:08:45.859777 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/aefe835b-88a0-4bfc-9358-601b13418414-metrics\") pod \"frr-k8s-gbfb5\" (UID: \"aefe835b-88a0-4bfc-9358-601b13418414\") " pod="metallb-system/frr-k8s-gbfb5"
Dec 10 13:08:45 crc kubenswrapper[4921]: I1210 13:08:45.859794 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/9a1c6903-fed0-48be-b83c-ec8416ee2204-metallb-excludel2\") pod \"speaker-nstdr\" (UID: \"9a1c6903-fed0-48be-b83c-ec8416ee2204\") " pod="metallb-system/speaker-nstdr"
Dec 10 13:08:45 crc kubenswrapper[4921]: I1210 13:08:45.859816 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/182ccfa3-c32b-4dca-9876-770f89d8eda5-cert\") pod \"controller-f8648f98b-spclt\" (UID: \"182ccfa3-c32b-4dca-9876-770f89d8eda5\") " pod="metallb-system/controller-f8648f98b-spclt"
Dec 10 13:08:45 crc kubenswrapper[4921]: I1210 13:08:45.859841 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/7421a4d2-6416-4ad9-b177-6644f804b950-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-kpktr\" (UID: \"7421a4d2-6416-4ad9-b177-6644f804b950\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-kpktr"
Dec 10 13:08:45 crc kubenswrapper[4921]: I1210 13:08:45.859864 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9a1c6903-fed0-48be-b83c-ec8416ee2204-metrics-certs\") pod \"speaker-nstdr\" (UID: \"9a1c6903-fed0-48be-b83c-ec8416ee2204\") " pod="metallb-system/speaker-nstdr"
Dec 10 13:08:45 crc kubenswrapper[4921]: E1210 13:08:45.859955 4921 secret.go:188] Couldn't get secret metallb-system/speaker-certs-secret: secret "speaker-certs-secret" not found
Dec 10 13:08:45 crc kubenswrapper[4921]: E1210 13:08:45.859979 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9a1c6903-fed0-48be-b83c-ec8416ee2204-metrics-certs podName:9a1c6903-fed0-48be-b83c-ec8416ee2204 nodeName:}" failed. No retries permitted until 2025-12-10 13:08:46.359970747 +0000 UTC m=+723.576192671 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/9a1c6903-fed0-48be-b83c-ec8416ee2204-metrics-certs") pod "speaker-nstdr" (UID: "9a1c6903-fed0-48be-b83c-ec8416ee2204") : secret "speaker-certs-secret" not found
Dec 10 13:08:45 crc kubenswrapper[4921]: I1210 13:08:45.860299 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/aefe835b-88a0-4bfc-9358-601b13418414-frr-conf\") pod \"frr-k8s-gbfb5\" (UID: \"aefe835b-88a0-4bfc-9358-601b13418414\") " pod="metallb-system/frr-k8s-gbfb5"
Dec 10 13:08:45 crc kubenswrapper[4921]: I1210 13:08:45.860480 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/aefe835b-88a0-4bfc-9358-601b13418414-metrics\") pod \"frr-k8s-gbfb5\" (UID: \"aefe835b-88a0-4bfc-9358-601b13418414\") " pod="metallb-system/frr-k8s-gbfb5"
Dec 10 13:08:45 crc kubenswrapper[4921]: I1210 13:08:45.860656 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/aefe835b-88a0-4bfc-9358-601b13418414-reloader\") pod \"frr-k8s-gbfb5\" (UID: \"aefe835b-88a0-4bfc-9358-601b13418414\") " pod="metallb-system/frr-k8s-gbfb5"
Dec 10 13:08:45 crc kubenswrapper[4921]: I1210 13:08:45.860875 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/aefe835b-88a0-4bfc-9358-601b13418414-frr-sockets\") pod \"frr-k8s-gbfb5\" (UID: \"aefe835b-88a0-4bfc-9358-601b13418414\") " pod="metallb-system/frr-k8s-gbfb5"
Dec 10 13:08:45 crc kubenswrapper[4921]: I1210 13:08:45.860994 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/9a1c6903-fed0-48be-b83c-ec8416ee2204-metallb-excludel2\") pod \"speaker-nstdr\" (UID: \"9a1c6903-fed0-48be-b83c-ec8416ee2204\") " pod="metallb-system/speaker-nstdr"
Dec 10 13:08:45 crc kubenswrapper[4921]: I1210 13:08:45.861356 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/aefe835b-88a0-4bfc-9358-601b13418414-frr-startup\") pod \"frr-k8s-gbfb5\" (UID: \"aefe835b-88a0-4bfc-9358-601b13418414\") " pod="metallb-system/frr-k8s-gbfb5"
Dec 10 13:08:45 crc kubenswrapper[4921]: I1210 13:08:45.863223 4921 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert"
Dec 10 13:08:45 crc kubenswrapper[4921]: I1210 13:08:45.866077 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/182ccfa3-c32b-4dca-9876-770f89d8eda5-metrics-certs\") pod \"controller-f8648f98b-spclt\" (UID: \"182ccfa3-c32b-4dca-9876-770f89d8eda5\") " pod="metallb-system/controller-f8648f98b-spclt"
Dec 10 13:08:45 crc kubenswrapper[4921]: I1210 13:08:45.874304 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/182ccfa3-c32b-4dca-9876-770f89d8eda5-cert\") pod \"controller-f8648f98b-spclt\" (UID: \"182ccfa3-c32b-4dca-9876-770f89d8eda5\") " pod="metallb-system/controller-f8648f98b-spclt"
Dec 10 13:08:45 crc kubenswrapper[4921]: I1210 13:08:45.874491 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/aefe835b-88a0-4bfc-9358-601b13418414-metrics-certs\") pod \"frr-k8s-gbfb5\" (UID: \"aefe835b-88a0-4bfc-9358-601b13418414\") " pod="metallb-system/frr-k8s-gbfb5"
Dec 10 13:08:45 crc kubenswrapper[4921]: I1210 13:08:45.882986 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gnqms\" (UniqueName: \"kubernetes.io/projected/9a1c6903-fed0-48be-b83c-ec8416ee2204-kube-api-access-gnqms\") pod \"speaker-nstdr\" (UID: \"9a1c6903-fed0-48be-b83c-ec8416ee2204\") " pod="metallb-system/speaker-nstdr"
Dec 10 13:08:45 crc kubenswrapper[4921]: I1210 13:08:45.883005 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/7421a4d2-6416-4ad9-b177-6644f804b950-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-kpktr\" (UID: \"7421a4d2-6416-4ad9-b177-6644f804b950\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-kpktr"
Dec 10 13:08:45 crc kubenswrapper[4921]: I1210 13:08:45.886984 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zhl2r\" (UniqueName: \"kubernetes.io/projected/7421a4d2-6416-4ad9-b177-6644f804b950-kube-api-access-zhl2r\") pod \"frr-k8s-webhook-server-7fcb986d4-kpktr\" (UID: \"7421a4d2-6416-4ad9-b177-6644f804b950\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-kpktr"
Dec 10 13:08:45 crc kubenswrapper[4921]: I1210 13:08:45.887712 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v59hf\" (UniqueName: \"kubernetes.io/projected/182ccfa3-c32b-4dca-9876-770f89d8eda5-kube-api-access-v59hf\") pod \"controller-f8648f98b-spclt\" (UID: \"182ccfa3-c32b-4dca-9876-770f89d8eda5\") " pod="metallb-system/controller-f8648f98b-spclt"
Dec 10 13:08:45 crc kubenswrapper[4921]: I1210 13:08:45.890809 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q69gk\" (UniqueName: \"kubernetes.io/projected/aefe835b-88a0-4bfc-9358-601b13418414-kube-api-access-q69gk\") pod \"frr-k8s-gbfb5\" (UID: \"aefe835b-88a0-4bfc-9358-601b13418414\") " pod="metallb-system/frr-k8s-gbfb5"
Dec 10 13:08:45 crc kubenswrapper[4921]: I1210 13:08:45.895774 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-kpktr"
Dec 10 13:08:46 crc kubenswrapper[4921]: I1210 13:08:46.044529 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-f8648f98b-spclt"
Dec 10 13:08:46 crc kubenswrapper[4921]: I1210 13:08:46.189772 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-gbfb5"
Dec 10 13:08:46 crc kubenswrapper[4921]: I1210 13:08:46.348784 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-kpktr"]
Dec 10 13:08:46 crc kubenswrapper[4921]: I1210 13:08:46.365479 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9a1c6903-fed0-48be-b83c-ec8416ee2204-metrics-certs\") pod \"speaker-nstdr\" (UID: \"9a1c6903-fed0-48be-b83c-ec8416ee2204\") " pod="metallb-system/speaker-nstdr"
Dec 10 13:08:46 crc kubenswrapper[4921]: I1210 13:08:46.365519 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/9a1c6903-fed0-48be-b83c-ec8416ee2204-memberlist\") pod \"speaker-nstdr\" (UID: \"9a1c6903-fed0-48be-b83c-ec8416ee2204\") " pod="metallb-system/speaker-nstdr"
Dec 10 13:08:46 crc kubenswrapper[4921]: E1210 13:08:46.365658 4921 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found
Dec 10 13:08:46 crc kubenswrapper[4921]: E1210 13:08:46.365709 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9a1c6903-fed0-48be-b83c-ec8416ee2204-memberlist podName:9a1c6903-fed0-48be-b83c-ec8416ee2204 nodeName:}" failed. No retries permitted until 2025-12-10 13:08:47.365695721 +0000 UTC m=+724.581917645 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/9a1c6903-fed0-48be-b83c-ec8416ee2204-memberlist") pod "speaker-nstdr" (UID: "9a1c6903-fed0-48be-b83c-ec8416ee2204") : secret "metallb-memberlist" not found
Dec 10 13:08:46 crc kubenswrapper[4921]: I1210 13:08:46.373447 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9a1c6903-fed0-48be-b83c-ec8416ee2204-metrics-certs\") pod \"speaker-nstdr\" (UID: \"9a1c6903-fed0-48be-b83c-ec8416ee2204\") " pod="metallb-system/speaker-nstdr"
Dec 10 13:08:46 crc kubenswrapper[4921]: I1210 13:08:46.455958 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-f8648f98b-spclt"]
Dec 10 13:08:46 crc kubenswrapper[4921]: W1210 13:08:46.455963 4921 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod182ccfa3_c32b_4dca_9876_770f89d8eda5.slice/crio-a8df4ecf034f7ac8421d4fc2bbac417a14ed424081e8a832037d061cdd9f6e01 WatchSource:0}: Error finding container a8df4ecf034f7ac8421d4fc2bbac417a14ed424081e8a832037d061cdd9f6e01: Status 404 returned error can't find the container with id a8df4ecf034f7ac8421d4fc2bbac417a14ed424081e8a832037d061cdd9f6e01
Dec 10 13:08:46 crc kubenswrapper[4921]: I1210 13:08:46.710963 4921 patch_prober.go:28] interesting pod/machine-config-daemon-vn2n6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 10 13:08:46 crc kubenswrapper[4921]: I1210 13:08:46.711011 4921 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" podUID="354355f7-6630-49a8-bdc5-5e875feecb7f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 10 13:08:47 crc kubenswrapper[4921]: I1210 13:08:47.219224 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-kpktr" event={"ID":"7421a4d2-6416-4ad9-b177-6644f804b950","Type":"ContainerStarted","Data":"ab9bcff17759fa326b5e0e678fb794096926cb65c456fd6627feed1959252112"}
Dec 10 13:08:47 crc kubenswrapper[4921]: I1210 13:08:47.221729 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-spclt" event={"ID":"182ccfa3-c32b-4dca-9876-770f89d8eda5","Type":"ContainerStarted","Data":"a95afe785df3bbf67d4ff26e76f7dc7fb857539719231e6777e82a8b17d56518"}
Dec 10 13:08:47 crc kubenswrapper[4921]: I1210 13:08:47.221759 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-spclt" event={"ID":"182ccfa3-c32b-4dca-9876-770f89d8eda5","Type":"ContainerStarted","Data":"af0379b9f69d8120fee0abc569cc382602e8b9df4a79fb4a1c895ef6966efdb3"}
Dec 10 13:08:47 crc kubenswrapper[4921]: I1210 13:08:47.221770 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-spclt" event={"ID":"182ccfa3-c32b-4dca-9876-770f89d8eda5","Type":"ContainerStarted","Data":"a8df4ecf034f7ac8421d4fc2bbac417a14ed424081e8a832037d061cdd9f6e01"}
Dec 10 13:08:47 crc kubenswrapper[4921]: I1210 13:08:47.222486 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-f8648f98b-spclt"
Dec 10 13:08:47 crc kubenswrapper[4921]: I1210 13:08:47.223323 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-gbfb5" event={"ID":"aefe835b-88a0-4bfc-9358-601b13418414","Type":"ContainerStarted","Data":"913fbc985f123e94be7321461b50a94eb06d6a361e629463f9fdf7c14cb5c136"}
Dec 10 13:08:47 crc kubenswrapper[4921]: I1210 13:08:47.239913 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-f8648f98b-spclt" podStartSLOduration=2.239894918 podStartE2EDuration="2.239894918s" podCreationTimestamp="2025-12-10 13:08:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 13:08:47.23848435 +0000 UTC m=+724.454706294" watchObservedRunningTime="2025-12-10 13:08:47.239894918 +0000 UTC m=+724.456116842"
Dec 10 13:08:47 crc kubenswrapper[4921]: I1210 13:08:47.377848 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/9a1c6903-fed0-48be-b83c-ec8416ee2204-memberlist\") pod \"speaker-nstdr\" (UID: \"9a1c6903-fed0-48be-b83c-ec8416ee2204\") " pod="metallb-system/speaker-nstdr"
Dec 10 13:08:47 crc kubenswrapper[4921]: I1210 13:08:47.382973 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/9a1c6903-fed0-48be-b83c-ec8416ee2204-memberlist\") pod \"speaker-nstdr\" (UID: \"9a1c6903-fed0-48be-b83c-ec8416ee2204\") " pod="metallb-system/speaker-nstdr"
Dec 10 13:08:47 crc kubenswrapper[4921]: I1210 13:08:47.516865 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-nstdr"
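
The failed secret mounts above retry on the kubelet's doubling backoff (durationBeforeRetry 500ms at 13:08:45, then 1s at 13:08:46) until the metallb-memberlist and speaker-certs-secret secrets exist, after which the mounts succeed at 13:08:46.373447 and 13:08:47.382973. A minimal sketch of the same wait-for-secret pattern observed from outside the kubelet, assuming client-go and a kubeconfig at the default location; the poll loop and delays mirror the log, not kubelet internals:

    package main

    import (
        "context"
        "fmt"
        "time"

        metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
        "k8s.io/client-go/kubernetes"
        "k8s.io/client-go/tools/clientcmd"
    )

    func main() {
        // Assumption: kubeconfig at ~/.kube/config; namespace and secret name taken from the log.
        cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
        if err != nil {
            panic(err)
        }
        client, err := kubernetes.NewForConfig(cfg)
        if err != nil {
            panic(err)
        }
        delay := 500 * time.Millisecond // the kubelet's first durationBeforeRetry in the log
        for {
            _, err := client.CoreV1().Secrets("metallb-system").Get(context.TODO(), "metallb-memberlist", metav1.GetOptions{})
            if err == nil {
                fmt.Println("secret present; the speaker's memberlist volume can now mount")
                return
            }
            fmt.Printf("still absent (%v), retrying in %v\n", err, delay)
            time.Sleep(delay)
            delay *= 2 // the log shows 500ms, then 1s
        }
    }
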
Dec 10 13:08:48 crc kubenswrapper[4921]: I1210 13:08:48.236982 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-nstdr" event={"ID":"9a1c6903-fed0-48be-b83c-ec8416ee2204","Type":"ContainerStarted","Data":"40cdb55572ca2dd93d9b0a7dcd2801f13990d2a7d09af6d2a300ce3b4494eee0"}
Dec 10 13:08:48 crc kubenswrapper[4921]: I1210 13:08:48.237305 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-nstdr" event={"ID":"9a1c6903-fed0-48be-b83c-ec8416ee2204","Type":"ContainerStarted","Data":"0c97c8ac6566e64e2742757cd473325c51ceb4ea5c62270e305c4a148208a2b3"}
Dec 10 13:08:48 crc kubenswrapper[4921]: I1210 13:08:48.237316 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-nstdr" event={"ID":"9a1c6903-fed0-48be-b83c-ec8416ee2204","Type":"ContainerStarted","Data":"1842de22596a362faa619073da28e702506c27d8293314574bb9e0a9cfce3d99"}
Dec 10 13:08:48 crc kubenswrapper[4921]: I1210 13:08:48.237477 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-nstdr"
Dec 10 13:08:48 crc kubenswrapper[4921]: I1210 13:08:48.259324 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-nstdr" podStartSLOduration=3.25930337 podStartE2EDuration="3.25930337s" podCreationTimestamp="2025-12-10 13:08:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 13:08:48.253778792 +0000 UTC m=+725.470000726" watchObservedRunningTime="2025-12-10 13:08:48.25930337 +0000 UTC m=+725.475525314"
Dec 10 13:08:54 crc kubenswrapper[4921]: I1210 13:08:54.283264 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-kpktr" event={"ID":"7421a4d2-6416-4ad9-b177-6644f804b950","Type":"ContainerStarted","Data":"85c853ef7b6098b5d9d02124c02aef81aec507beb6dbc6231ebe1865e2a7fa16"}
Dec 10 13:08:54 crc kubenswrapper[4921]: I1210 13:08:54.283826 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-kpktr"
Dec 10 13:08:54 crc kubenswrapper[4921]: I1210 13:08:54.284775 4921 generic.go:334] "Generic (PLEG): container finished" podID="aefe835b-88a0-4bfc-9358-601b13418414" containerID="371dedff8b1221c8269f101bc0abed5c4f5ffcca88b7ea979b672e4629bd5b41" exitCode=0
Dec 10 13:08:54 crc kubenswrapper[4921]: I1210 13:08:54.284815 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-gbfb5" event={"ID":"aefe835b-88a0-4bfc-9358-601b13418414","Type":"ContainerDied","Data":"371dedff8b1221c8269f101bc0abed5c4f5ffcca88b7ea979b672e4629bd5b41"}
Dec 10 13:08:54 crc kubenswrapper[4921]: I1210 13:08:54.300545 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-kpktr" podStartSLOduration=1.8035654060000001 podStartE2EDuration="9.30053045s" podCreationTimestamp="2025-12-10 13:08:45 +0000 UTC" firstStartedPulling="2025-12-10 13:08:46.376941903 +0000 UTC m=+723.593163827" lastFinishedPulling="2025-12-10 13:08:53.873906947 +0000 UTC m=+731.090128871" observedRunningTime="2025-12-10 13:08:54.298031663 +0000 UTC m=+731.514253647" watchObservedRunningTime="2025-12-10 13:08:54.30053045 +0000 UTC m=+731.516752384"
Dec 10 13:08:55 crc kubenswrapper[4921]: I1210 13:08:55.291650 4921 generic.go:334] "Generic (PLEG): container finished" podID="aefe835b-88a0-4bfc-9358-601b13418414" containerID="6de5bd1a2148856300cfa124c5f8747edc9598d98854e692e78de2bafa381b5a" exitCode=0
Dec 10 13:08:55 crc kubenswrapper[4921]: I1210 13:08:55.291780 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-gbfb5" event={"ID":"aefe835b-88a0-4bfc-9358-601b13418414","Type":"ContainerDied","Data":"6de5bd1a2148856300cfa124c5f8747edc9598d98854e692e78de2bafa381b5a"}
Dec 10 13:08:56 crc kubenswrapper[4921]: I1210 13:08:56.050005 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-f8648f98b-spclt"
Dec 10 13:08:56 crc kubenswrapper[4921]: I1210 13:08:56.297954 4921 generic.go:334] "Generic (PLEG): container finished" podID="aefe835b-88a0-4bfc-9358-601b13418414" containerID="97c96fc787168cc4bd7be43241c24e7546282200b8b911f398f2b89abd8a66f9" exitCode=0
Dec 10 13:08:56 crc kubenswrapper[4921]: I1210 13:08:56.298003 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-gbfb5" event={"ID":"aefe835b-88a0-4bfc-9358-601b13418414","Type":"ContainerDied","Data":"97c96fc787168cc4bd7be43241c24e7546282200b8b911f398f2b89abd8a66f9"}
Dec 10 13:08:57 crc kubenswrapper[4921]: I1210 13:08:57.309284 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-gbfb5" event={"ID":"aefe835b-88a0-4bfc-9358-601b13418414","Type":"ContainerStarted","Data":"093205a863b39856bddfa66d0b50d5c67b702798ed97fd92dffd5ff3a031f661"}
Dec 10 13:08:57 crc kubenswrapper[4921]: I1210 13:08:57.309594 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-gbfb5" event={"ID":"aefe835b-88a0-4bfc-9358-601b13418414","Type":"ContainerStarted","Data":"ae3fe86d0ad782f07abce8eed36f8aee8233459d02b08c2682b1fc726b56e2c8"}
Dec 10 13:08:57 crc kubenswrapper[4921]: I1210 13:08:57.309608 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-gbfb5" event={"ID":"aefe835b-88a0-4bfc-9358-601b13418414","Type":"ContainerStarted","Data":"b840c0463c344a4d75b3cdcc64fd5f49d2b8d2402ce324e6325ce78a17d2ed56"}
Dec 10 13:08:57 crc kubenswrapper[4921]: I1210 13:08:57.309619 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-gbfb5" event={"ID":"aefe835b-88a0-4bfc-9358-601b13418414","Type":"ContainerStarted","Data":"f9aafc0e35e5b76e254e6097371b601b019a182db0c71dd392102fbb4d0c670a"}
Dec 10 13:08:57 crc kubenswrapper[4921]: I1210 13:08:57.521019 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-nstdr"
Dec 10 13:08:58 crc kubenswrapper[4921]: I1210 13:08:58.318437 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-gbfb5" event={"ID":"aefe835b-88a0-4bfc-9358-601b13418414","Type":"ContainerStarted","Data":"305566ae907e16ac80d710d3d5e50ccf9781a73977957935f453fc68ed0a0dd4"}
Dec 10 13:08:58 crc kubenswrapper[4921]: I1210 13:08:58.319300 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-gbfb5" event={"ID":"aefe835b-88a0-4bfc-9358-601b13418414","Type":"ContainerStarted","Data":"14ff7f86903bd594d884f04ad9bce20c16070fc6602853e8332ef9f8cfd323d3"}
Dec 10 13:08:58 crc kubenswrapper[4921]: I1210 13:08:58.320237 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-gbfb5"
Dec 10 13:08:58 crc kubenswrapper[4921]: I1210 13:08:58.342345 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-gbfb5" podStartSLOduration=5.890937362 podStartE2EDuration="13.342329284s" podCreationTimestamp="2025-12-10 13:08:45 +0000 UTC" firstStartedPulling="2025-12-10 13:08:46.37495894 +0000 UTC m=+723.591180864" lastFinishedPulling="2025-12-10 13:08:53.826350812 +0000 UTC m=+731.042572786" observedRunningTime="2025-12-10 13:08:58.337982757 +0000 UTC m=+735.554204691" watchObservedRunningTime="2025-12-10 13:08:58.342329284 +0000 UTC m=+735.558551208"
Dec 10 13:09:00 crc kubenswrapper[4921]: I1210 13:09:00.389102 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-5c86v"]
Dec 10 13:09:00 crc kubenswrapper[4921]: I1210 13:09:00.390365 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-5c86v"
Dec 10 13:09:00 crc kubenswrapper[4921]: I1210 13:09:00.392702 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt"
Dec 10 13:09:00 crc kubenswrapper[4921]: I1210 13:09:00.392788 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt"
Dec 10 13:09:00 crc kubenswrapper[4921]: I1210 13:09:00.393528 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-6cl82"
Dec 10 13:09:00 crc kubenswrapper[4921]: I1210 13:09:00.454758 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-5c86v"]
Dec 10 13:09:00 crc kubenswrapper[4921]: I1210 13:09:00.471595 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7lxh6\" (UniqueName: \"kubernetes.io/projected/5fbee7a8-66f6-4176-8048-693f67e58d72-kube-api-access-7lxh6\") pod \"openstack-operator-index-5c86v\" (UID: \"5fbee7a8-66f6-4176-8048-693f67e58d72\") " pod="openstack-operators/openstack-operator-index-5c86v"
Dec 10 13:09:00 crc kubenswrapper[4921]: I1210 13:09:00.572906 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7lxh6\" (UniqueName: \"kubernetes.io/projected/5fbee7a8-66f6-4176-8048-693f67e58d72-kube-api-access-7lxh6\") pod \"openstack-operator-index-5c86v\" (UID: \"5fbee7a8-66f6-4176-8048-693f67e58d72\") " pod="openstack-operators/openstack-operator-index-5c86v"
Dec 10 13:09:00 crc kubenswrapper[4921]: I1210 13:09:00.589710 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7lxh6\" (UniqueName: \"kubernetes.io/projected/5fbee7a8-66f6-4176-8048-693f67e58d72-kube-api-access-7lxh6\") pod \"openstack-operator-index-5c86v\" (UID: \"5fbee7a8-66f6-4176-8048-693f67e58d72\") " pod="openstack-operators/openstack-operator-index-5c86v"
Dec 10 13:09:00 crc kubenswrapper[4921]: I1210 13:09:00.708483 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-5c86v"
Dec 10 13:09:01 crc kubenswrapper[4921]: I1210 13:09:01.105970 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-5c86v"]
Dec 10 13:09:01 crc kubenswrapper[4921]: I1210 13:09:01.190172 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-gbfb5"
Dec 10 13:09:01 crc kubenswrapper[4921]: I1210 13:09:01.231480 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-gbfb5"
Dec 10 13:09:01 crc kubenswrapper[4921]: I1210 13:09:01.336984 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-5c86v" event={"ID":"5fbee7a8-66f6-4176-8048-693f67e58d72","Type":"ContainerStarted","Data":"4a7d3abce687aef7e6726e6f8b259146d623d659a5aa99646d7ef4aee33a7e05"}
Dec 10 13:09:03 crc kubenswrapper[4921]: I1210 13:09:03.770750 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-5c86v"]
Dec 10 13:09:04 crc kubenswrapper[4921]: I1210 13:09:04.356914 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-5c86v" event={"ID":"5fbee7a8-66f6-4176-8048-693f67e58d72","Type":"ContainerStarted","Data":"7308f080b5aad57e16fac2da933b7559850809961bbb80df01983efb22088c2b"}
Dec 10 13:09:04 crc kubenswrapper[4921]: I1210 13:09:04.373835 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-5c86v" podStartSLOduration=1.9046166169999998 podStartE2EDuration="4.373816752s" podCreationTimestamp="2025-12-10 13:09:00 +0000 UTC" firstStartedPulling="2025-12-10 13:09:01.102636267 +0000 UTC m=+738.318858201" lastFinishedPulling="2025-12-10 13:09:03.571836412 +0000 UTC m=+740.788058336" observedRunningTime="2025-12-10 13:09:04.371412128 +0000 UTC m=+741.587634062" watchObservedRunningTime="2025-12-10 13:09:04.373816752 +0000 UTC m=+741.590038676"
Dec 10 13:09:04 crc kubenswrapper[4921]: I1210 13:09:04.378668 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-6k82p"]
Dec 10 13:09:04 crc kubenswrapper[4921]: I1210 13:09:04.379466 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-6k82p"
Dec 10 13:09:04 crc kubenswrapper[4921]: I1210 13:09:04.399829 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-6k82p"]
Dec 10 13:09:04 crc kubenswrapper[4921]: I1210 13:09:04.525489 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lvz8w\" (UniqueName: \"kubernetes.io/projected/122d72c1-1bef-4ecb-90bb-424d2b989b29-kube-api-access-lvz8w\") pod \"openstack-operator-index-6k82p\" (UID: \"122d72c1-1bef-4ecb-90bb-424d2b989b29\") " pod="openstack-operators/openstack-operator-index-6k82p"
Dec 10 13:09:04 crc kubenswrapper[4921]: I1210 13:09:04.627075 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lvz8w\" (UniqueName: \"kubernetes.io/projected/122d72c1-1bef-4ecb-90bb-424d2b989b29-kube-api-access-lvz8w\") pod \"openstack-operator-index-6k82p\" (UID: \"122d72c1-1bef-4ecb-90bb-424d2b989b29\") " pod="openstack-operators/openstack-operator-index-6k82p"
Dec 10 13:09:04 crc kubenswrapper[4921]: I1210 13:09:04.645302 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lvz8w\" (UniqueName: \"kubernetes.io/projected/122d72c1-1bef-4ecb-90bb-424d2b989b29-kube-api-access-lvz8w\") pod \"openstack-operator-index-6k82p\" (UID: \"122d72c1-1bef-4ecb-90bb-424d2b989b29\") " pod="openstack-operators/openstack-operator-index-6k82p"
Dec 10 13:09:04 crc kubenswrapper[4921]: I1210 13:09:04.692376 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-6k82p"
Dec 10 13:09:05 crc kubenswrapper[4921]: I1210 13:09:05.110528 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-6k82p"]
Dec 10 13:09:05 crc kubenswrapper[4921]: I1210 13:09:05.363990 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/openstack-operator-index-5c86v" podUID="5fbee7a8-66f6-4176-8048-693f67e58d72" containerName="registry-server" containerID="cri-o://7308f080b5aad57e16fac2da933b7559850809961bbb80df01983efb22088c2b" gracePeriod=2
Dec 10 13:09:05 crc kubenswrapper[4921]: I1210 13:09:05.364456 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-6k82p" event={"ID":"122d72c1-1bef-4ecb-90bb-424d2b989b29","Type":"ContainerStarted","Data":"40a1edc241d287c77cd1fc6db01af9ad5dc5665878ef83a536afb82ed43c53c5"}
Dec 10 13:09:05 crc kubenswrapper[4921]: I1210 13:09:05.364700 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-6k82p" event={"ID":"122d72c1-1bef-4ecb-90bb-424d2b989b29","Type":"ContainerStarted","Data":"077efa1b9e523eaf824174c251795657925485a644f7ddc0843b6f4d886adbb2"}
Dec 10 13:09:05 crc kubenswrapper[4921]: I1210 13:09:05.384193 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-6k82p" podStartSLOduration=1.321083169 podStartE2EDuration="1.384178291s" podCreationTimestamp="2025-12-10 13:09:04 +0000 UTC" firstStartedPulling="2025-12-10 13:09:05.11976953 +0000 UTC m=+742.335991454" lastFinishedPulling="2025-12-10 13:09:05.182864652 +0000 UTC m=+742.399086576" observedRunningTime="2025-12-10 13:09:05.380714438 +0000 UTC m=+742.596936362" watchObservedRunningTime="2025-12-10 13:09:05.384178291 +0000 UTC m=+742.600400215"
Dec 10 13:09:05 crc kubenswrapper[4921]: I1210 13:09:05.719272 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-5c86v"
Dec 10 13:09:05 crc kubenswrapper[4921]: I1210 13:09:05.841924 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7lxh6\" (UniqueName: \"kubernetes.io/projected/5fbee7a8-66f6-4176-8048-693f67e58d72-kube-api-access-7lxh6\") pod \"5fbee7a8-66f6-4176-8048-693f67e58d72\" (UID: \"5fbee7a8-66f6-4176-8048-693f67e58d72\") "
Dec 10 13:09:05 crc kubenswrapper[4921]: I1210 13:09:05.847487 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fbee7a8-66f6-4176-8048-693f67e58d72-kube-api-access-7lxh6" (OuterVolumeSpecName: "kube-api-access-7lxh6") pod "5fbee7a8-66f6-4176-8048-693f67e58d72" (UID: "5fbee7a8-66f6-4176-8048-693f67e58d72"). InnerVolumeSpecName "kube-api-access-7lxh6". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 10 13:09:05 crc kubenswrapper[4921]: I1210 13:09:05.901040 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-kpktr"
Dec 10 13:09:05 crc kubenswrapper[4921]: I1210 13:09:05.943859 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7lxh6\" (UniqueName: \"kubernetes.io/projected/5fbee7a8-66f6-4176-8048-693f67e58d72-kube-api-access-7lxh6\") on node \"crc\" DevicePath \"\""
Dec 10 13:09:06 crc kubenswrapper[4921]: I1210 13:09:06.192875 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-gbfb5"
Dec 10 13:09:06 crc kubenswrapper[4921]: I1210 13:09:06.372684 4921 generic.go:334] "Generic (PLEG): container finished" podID="5fbee7a8-66f6-4176-8048-693f67e58d72" containerID="7308f080b5aad57e16fac2da933b7559850809961bbb80df01983efb22088c2b" exitCode=0
Dec 10 13:09:06 crc kubenswrapper[4921]: I1210 13:09:06.372766 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-5c86v"
Dec 10 13:09:06 crc kubenswrapper[4921]: I1210 13:09:06.372764 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-5c86v" event={"ID":"5fbee7a8-66f6-4176-8048-693f67e58d72","Type":"ContainerDied","Data":"7308f080b5aad57e16fac2da933b7559850809961bbb80df01983efb22088c2b"}
Dec 10 13:09:06 crc kubenswrapper[4921]: I1210 13:09:06.372838 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-5c86v" event={"ID":"5fbee7a8-66f6-4176-8048-693f67e58d72","Type":"ContainerDied","Data":"4a7d3abce687aef7e6726e6f8b259146d623d659a5aa99646d7ef4aee33a7e05"}
Dec 10 13:09:06 crc kubenswrapper[4921]: I1210 13:09:06.372869 4921 scope.go:117] "RemoveContainer" containerID="7308f080b5aad57e16fac2da933b7559850809961bbb80df01983efb22088c2b"
Dec 10 13:09:06 crc kubenswrapper[4921]: I1210 13:09:06.391965 4921 scope.go:117] "RemoveContainer" containerID="7308f080b5aad57e16fac2da933b7559850809961bbb80df01983efb22088c2b"
Dec 10 13:09:06 crc kubenswrapper[4921]: E1210 13:09:06.392747 4921 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7308f080b5aad57e16fac2da933b7559850809961bbb80df01983efb22088c2b\": container with ID starting with 7308f080b5aad57e16fac2da933b7559850809961bbb80df01983efb22088c2b not found: ID does not exist" containerID="7308f080b5aad57e16fac2da933b7559850809961bbb80df01983efb22088c2b"
Dec 10 13:09:06 crc kubenswrapper[4921]: I1210 13:09:06.392860 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7308f080b5aad57e16fac2da933b7559850809961bbb80df01983efb22088c2b"} err="failed to get container status \"7308f080b5aad57e16fac2da933b7559850809961bbb80df01983efb22088c2b\": rpc error: code = NotFound desc = could not find container \"7308f080b5aad57e16fac2da933b7559850809961bbb80df01983efb22088c2b\": container with ID starting with 7308f080b5aad57e16fac2da933b7559850809961bbb80df01983efb22088c2b not found: ID does not exist"
Dec 10 13:09:06 crc kubenswrapper[4921]: I1210 13:09:06.415309 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-5c86v"]
Dec 10 13:09:06 crc kubenswrapper[4921]: I1210 13:09:06.422739 4921 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-index-5c86v"]
Dec 10 13:09:07 crc kubenswrapper[4921]: I1210 13:09:07.203653 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fbee7a8-66f6-4176-8048-693f67e58d72" path="/var/lib/kubelet/pods/5fbee7a8-66f6-4176-8048-693f67e58d72/volumes"
Dec 10 13:09:14 crc kubenswrapper[4921]: I1210 13:09:14.692876 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-index-6k82p"
Dec 10 13:09:14 crc kubenswrapper[4921]: I1210 13:09:14.693638 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/openstack-operator-index-6k82p"
Dec 10 13:09:14 crc kubenswrapper[4921]: I1210 13:09:14.726325 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-6k82p"
Dec 10 13:09:15 crc kubenswrapper[4921]: I1210 13:09:15.451582 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-6k82p"
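
The sequence above is the replacement of one catalog index pod by another: the API DELETE at 13:09:03 leads the kubelet to kill registry-server with the short gracePeriod=2 requested for the pod, PLEG reports ContainerDied, and the follow-up RemoveContainer races the runtime's own cleanup, so the NotFound from ContainerStatus is benign. A sketch of issuing the same graceful delete from a client, assuming client-go and a default kubeconfig; the namespace, pod name, and grace period are taken from the log:

    package main

    import (
        "context"
        "fmt"

        apierrors "k8s.io/apimachinery/pkg/api/errors"
        metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
        "k8s.io/client-go/kubernetes"
        "k8s.io/client-go/tools/clientcmd"
    )

    func main() {
        cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
        if err != nil {
            panic(err)
        }
        client, err := kubernetes.NewForConfig(cfg)
        if err != nil {
            panic(err)
        }
        grace := int64(2) // the kubelet logged gracePeriod=2 when killing registry-server
        err = client.CoreV1().Pods("openstack-operators").Delete(context.TODO(),
            "openstack-operator-index-5c86v", metav1.DeleteOptions{GracePeriodSeconds: &grace})
        if apierrors.IsNotFound(err) {
            // Mirrors the benign NotFound in the log: the object can already be gone when we act.
            fmt.Println("already deleted")
            return
        }
        if err != nil {
            panic(err)
        }
        fmt.Println("delete issued with a 2s grace period")
    }
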
Dec 10 13:09:16 crc kubenswrapper[4921]: I1210 13:09:16.711337 4921 patch_prober.go:28] interesting pod/machine-config-daemon-vn2n6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 10 13:09:16 crc kubenswrapper[4921]: I1210 13:09:16.711840 4921 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" podUID="354355f7-6630-49a8-bdc5-5e875feecb7f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 10 13:09:16 crc kubenswrapper[4921]: I1210 13:09:16.711910 4921 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6"
Dec 10 13:09:16 crc kubenswrapper[4921]: I1210 13:09:16.712819 4921 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"14289058d0fb041d586a9216e5a19d1f702167fc4a4034c67755ae206f4ba1ec"} pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 10 13:09:16 crc kubenswrapper[4921]: I1210 13:09:16.712926 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" podUID="354355f7-6630-49a8-bdc5-5e875feecb7f" containerName="machine-config-daemon" containerID="cri-o://14289058d0fb041d586a9216e5a19d1f702167fc4a4034c67755ae206f4ba1ec" gracePeriod=600
Dec 10 13:09:16 crc kubenswrapper[4921]: I1210 13:09:16.823177 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/147d6c35b26de94843aae2cc16def28bc6b9292bfcf7a2079ec0c049658wjps"]
Dec 10 13:09:16 crc kubenswrapper[4921]: E1210 13:09:16.823435 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5fbee7a8-66f6-4176-8048-693f67e58d72" containerName="registry-server"
Dec 10 13:09:16 crc kubenswrapper[4921]: I1210 13:09:16.823455 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="5fbee7a8-66f6-4176-8048-693f67e58d72" containerName="registry-server"
Dec 10 13:09:16 crc kubenswrapper[4921]: I1210 13:09:16.823606 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="5fbee7a8-66f6-4176-8048-693f67e58d72" containerName="registry-server"
Dec 10 13:09:16 crc kubenswrapper[4921]: I1210 13:09:16.824586 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/147d6c35b26de94843aae2cc16def28bc6b9292bfcf7a2079ec0c049658wjps"
Dec 10 13:09:16 crc kubenswrapper[4921]: I1210 13:09:16.829343 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-46gf9"
Dec 10 13:09:16 crc kubenswrapper[4921]: I1210 13:09:16.837719 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/147d6c35b26de94843aae2cc16def28bc6b9292bfcf7a2079ec0c049658wjps"]
Dec 10 13:09:16 crc kubenswrapper[4921]: I1210 13:09:16.991000 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z9p5b\" (UniqueName: \"kubernetes.io/projected/afe709e3-dccf-4ca4-867a-a75748eb4bcc-kube-api-access-z9p5b\") pod \"147d6c35b26de94843aae2cc16def28bc6b9292bfcf7a2079ec0c049658wjps\" (UID: \"afe709e3-dccf-4ca4-867a-a75748eb4bcc\") " pod="openstack-operators/147d6c35b26de94843aae2cc16def28bc6b9292bfcf7a2079ec0c049658wjps"
Dec 10 13:09:16 crc kubenswrapper[4921]: I1210 13:09:16.991330 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/afe709e3-dccf-4ca4-867a-a75748eb4bcc-util\") pod \"147d6c35b26de94843aae2cc16def28bc6b9292bfcf7a2079ec0c049658wjps\" (UID: \"afe709e3-dccf-4ca4-867a-a75748eb4bcc\") " pod="openstack-operators/147d6c35b26de94843aae2cc16def28bc6b9292bfcf7a2079ec0c049658wjps"
Dec 10 13:09:16 crc kubenswrapper[4921]: I1210 13:09:16.991381 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/afe709e3-dccf-4ca4-867a-a75748eb4bcc-bundle\") pod \"147d6c35b26de94843aae2cc16def28bc6b9292bfcf7a2079ec0c049658wjps\" (UID: \"afe709e3-dccf-4ca4-867a-a75748eb4bcc\") " pod="openstack-operators/147d6c35b26de94843aae2cc16def28bc6b9292bfcf7a2079ec0c049658wjps"
Dec 10 13:09:17 crc kubenswrapper[4921]: I1210 13:09:17.092862 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/afe709e3-dccf-4ca4-867a-a75748eb4bcc-util\") pod \"147d6c35b26de94843aae2cc16def28bc6b9292bfcf7a2079ec0c049658wjps\" (UID: \"afe709e3-dccf-4ca4-867a-a75748eb4bcc\") " pod="openstack-operators/147d6c35b26de94843aae2cc16def28bc6b9292bfcf7a2079ec0c049658wjps"
Dec 10 13:09:17 crc kubenswrapper[4921]: I1210 13:09:17.092994 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/afe709e3-dccf-4ca4-867a-a75748eb4bcc-bundle\") pod \"147d6c35b26de94843aae2cc16def28bc6b9292bfcf7a2079ec0c049658wjps\" (UID: \"afe709e3-dccf-4ca4-867a-a75748eb4bcc\") " pod="openstack-operators/147d6c35b26de94843aae2cc16def28bc6b9292bfcf7a2079ec0c049658wjps"
Dec 10 13:09:17 crc kubenswrapper[4921]: I1210 13:09:17.093052 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z9p5b\" (UniqueName: \"kubernetes.io/projected/afe709e3-dccf-4ca4-867a-a75748eb4bcc-kube-api-access-z9p5b\") pod \"147d6c35b26de94843aae2cc16def28bc6b9292bfcf7a2079ec0c049658wjps\" (UID: \"afe709e3-dccf-4ca4-867a-a75748eb4bcc\") " pod="openstack-operators/147d6c35b26de94843aae2cc16def28bc6b9292bfcf7a2079ec0c049658wjps"
Dec 10 13:09:17 crc kubenswrapper[4921]: I1210 13:09:17.093621 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/afe709e3-dccf-4ca4-867a-a75748eb4bcc-bundle\") pod \"147d6c35b26de94843aae2cc16def28bc6b9292bfcf7a2079ec0c049658wjps\" (UID: \"afe709e3-dccf-4ca4-867a-a75748eb4bcc\") " pod="openstack-operators/147d6c35b26de94843aae2cc16def28bc6b9292bfcf7a2079ec0c049658wjps"
Dec 10 13:09:17 crc kubenswrapper[4921]: I1210 13:09:17.093848 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/afe709e3-dccf-4ca4-867a-a75748eb4bcc-util\") pod \"147d6c35b26de94843aae2cc16def28bc6b9292bfcf7a2079ec0c049658wjps\" (UID: \"afe709e3-dccf-4ca4-867a-a75748eb4bcc\") " pod="openstack-operators/147d6c35b26de94843aae2cc16def28bc6b9292bfcf7a2079ec0c049658wjps"
Dec 10 13:09:17 crc kubenswrapper[4921]: I1210 13:09:17.118380 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z9p5b\" (UniqueName: \"kubernetes.io/projected/afe709e3-dccf-4ca4-867a-a75748eb4bcc-kube-api-access-z9p5b\") pod \"147d6c35b26de94843aae2cc16def28bc6b9292bfcf7a2079ec0c049658wjps\" (UID: \"afe709e3-dccf-4ca4-867a-a75748eb4bcc\") " pod="openstack-operators/147d6c35b26de94843aae2cc16def28bc6b9292bfcf7a2079ec0c049658wjps"
Dec 10 13:09:17 crc kubenswrapper[4921]: I1210 13:09:17.214429 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/147d6c35b26de94843aae2cc16def28bc6b9292bfcf7a2079ec0c049658wjps"
Dec 10 13:09:17 crc kubenswrapper[4921]: I1210 13:09:17.444621 4921 generic.go:334] "Generic (PLEG): container finished" podID="354355f7-6630-49a8-bdc5-5e875feecb7f" containerID="14289058d0fb041d586a9216e5a19d1f702167fc4a4034c67755ae206f4ba1ec" exitCode=0
Dec 10 13:09:17 crc kubenswrapper[4921]: I1210 13:09:17.444665 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" event={"ID":"354355f7-6630-49a8-bdc5-5e875feecb7f","Type":"ContainerDied","Data":"14289058d0fb041d586a9216e5a19d1f702167fc4a4034c67755ae206f4ba1ec"}
Dec 10 13:09:17 crc kubenswrapper[4921]: I1210 13:09:17.444693 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" event={"ID":"354355f7-6630-49a8-bdc5-5e875feecb7f","Type":"ContainerStarted","Data":"8d11980c56c7b436b6d741535cc469b576e206b3de67a362d6c36f2e03055365"}
Dec 10 13:09:17 crc kubenswrapper[4921]: I1210 13:09:17.444709 4921 scope.go:117] "RemoveContainer" containerID="65cabfaddf4e4a7b2b469a25f75e10271c9f98df50571e3320da2cebc7ca5d27"
Dec 10 13:09:18 crc kubenswrapper[4921]: I1210 13:09:18.001655 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/147d6c35b26de94843aae2cc16def28bc6b9292bfcf7a2079ec0c049658wjps"]
Dec 10 13:09:18 crc kubenswrapper[4921]: W1210 13:09:18.010903 4921 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podafe709e3_dccf_4ca4_867a_a75748eb4bcc.slice/crio-6c0af18045888e749a581dd525edc33ac62a4914b19a398c322f7dd34068da6b WatchSource:0}: Error finding container 6c0af18045888e749a581dd525edc33ac62a4914b19a398c322f7dd34068da6b: Status 404 returned error can't find the container with id 6c0af18045888e749a581dd525edc33ac62a4914b19a398c322f7dd34068da6b
Dec 10 13:09:18 crc kubenswrapper[4921]: I1210 13:09:18.454103 4921 generic.go:334] "Generic (PLEG): container finished" podID="afe709e3-dccf-4ca4-867a-a75748eb4bcc" containerID="ffba642f06af1ca522a4d785e23272acbb2947facae496a5b42074ce8d6d6f77" exitCode=0
Dec 10 13:09:18 crc kubenswrapper[4921]: I1210 13:09:18.454210 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/147d6c35b26de94843aae2cc16def28bc6b9292bfcf7a2079ec0c049658wjps" event={"ID":"afe709e3-dccf-4ca4-867a-a75748eb4bcc","Type":"ContainerDied","Data":"ffba642f06af1ca522a4d785e23272acbb2947facae496a5b42074ce8d6d6f77"}
Dec 10 13:09:18 crc kubenswrapper[4921]: I1210 13:09:18.454434 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/147d6c35b26de94843aae2cc16def28bc6b9292bfcf7a2079ec0c049658wjps" event={"ID":"afe709e3-dccf-4ca4-867a-a75748eb4bcc","Type":"ContainerStarted","Data":"6c0af18045888e749a581dd525edc33ac62a4914b19a398c322f7dd34068da6b"}
Dec 10 13:09:19 crc kubenswrapper[4921]: I1210 13:09:19.461712 4921 generic.go:334] "Generic (PLEG): container finished" podID="afe709e3-dccf-4ca4-867a-a75748eb4bcc" containerID="39f7c91995ed5ab08899e2d14bfdb6e409242b191885617b5cf2e35751861f23" exitCode=0
Dec 10 13:09:19 crc kubenswrapper[4921]: I1210 13:09:19.461752 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/147d6c35b26de94843aae2cc16def28bc6b9292bfcf7a2079ec0c049658wjps" event={"ID":"afe709e3-dccf-4ca4-867a-a75748eb4bcc","Type":"ContainerDied","Data":"39f7c91995ed5ab08899e2d14bfdb6e409242b191885617b5cf2e35751861f23"}
Dec 10 13:09:20 crc kubenswrapper[4921]: I1210 13:09:20.470414 4921 generic.go:334] "Generic (PLEG): container finished" podID="afe709e3-dccf-4ca4-867a-a75748eb4bcc" containerID="fda4098dbb1e1daa134bf22fcb6c7a4c554e02be478ac6b42d6adc7e93ec2835" exitCode=0
Dec 10 13:09:20 crc kubenswrapper[4921]: I1210 13:09:20.470528 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/147d6c35b26de94843aae2cc16def28bc6b9292bfcf7a2079ec0c049658wjps" event={"ID":"afe709e3-dccf-4ca4-867a-a75748eb4bcc","Type":"ContainerDied","Data":"fda4098dbb1e1daa134bf22fcb6c7a4c554e02be478ac6b42d6adc7e93ec2835"}
Dec 10 13:09:21 crc kubenswrapper[4921]: I1210 13:09:21.692292 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/147d6c35b26de94843aae2cc16def28bc6b9292bfcf7a2079ec0c049658wjps"
Dec 10 13:09:21 crc kubenswrapper[4921]: I1210 13:09:21.849722 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/afe709e3-dccf-4ca4-867a-a75748eb4bcc-util\") pod \"afe709e3-dccf-4ca4-867a-a75748eb4bcc\" (UID: \"afe709e3-dccf-4ca4-867a-a75748eb4bcc\") "
Dec 10 13:09:21 crc kubenswrapper[4921]: I1210 13:09:21.849819 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z9p5b\" (UniqueName: \"kubernetes.io/projected/afe709e3-dccf-4ca4-867a-a75748eb4bcc-kube-api-access-z9p5b\") pod \"afe709e3-dccf-4ca4-867a-a75748eb4bcc\" (UID: \"afe709e3-dccf-4ca4-867a-a75748eb4bcc\") "
Dec 10 13:09:21 crc kubenswrapper[4921]: I1210 13:09:21.850213 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/afe709e3-dccf-4ca4-867a-a75748eb4bcc-bundle\") pod \"afe709e3-dccf-4ca4-867a-a75748eb4bcc\" (UID: \"afe709e3-dccf-4ca4-867a-a75748eb4bcc\") "
Dec 10 13:09:21 crc kubenswrapper[4921]: I1210 13:09:21.851596 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/afe709e3-dccf-4ca4-867a-a75748eb4bcc-bundle" (OuterVolumeSpecName: "bundle") pod "afe709e3-dccf-4ca4-867a-a75748eb4bcc" (UID: "afe709e3-dccf-4ca4-867a-a75748eb4bcc"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 10 13:09:21 crc kubenswrapper[4921]: I1210 13:09:21.856381 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/afe709e3-dccf-4ca4-867a-a75748eb4bcc-kube-api-access-z9p5b" (OuterVolumeSpecName: "kube-api-access-z9p5b") pod "afe709e3-dccf-4ca4-867a-a75748eb4bcc" (UID: "afe709e3-dccf-4ca4-867a-a75748eb4bcc"). InnerVolumeSpecName "kube-api-access-z9p5b". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 10 13:09:21 crc kubenswrapper[4921]: I1210 13:09:21.865265 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/afe709e3-dccf-4ca4-867a-a75748eb4bcc-util" (OuterVolumeSpecName: "util") pod "afe709e3-dccf-4ca4-867a-a75748eb4bcc" (UID: "afe709e3-dccf-4ca4-867a-a75748eb4bcc"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 10 13:09:21 crc kubenswrapper[4921]: I1210 13:09:21.951654 4921 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/afe709e3-dccf-4ca4-867a-a75748eb4bcc-util\") on node \"crc\" DevicePath \"\""
Dec 10 13:09:21 crc kubenswrapper[4921]: I1210 13:09:21.951700 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z9p5b\" (UniqueName: \"kubernetes.io/projected/afe709e3-dccf-4ca4-867a-a75748eb4bcc-kube-api-access-z9p5b\") on node \"crc\" DevicePath \"\""
Dec 10 13:09:21 crc kubenswrapper[4921]: I1210 13:09:21.951717 4921 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/afe709e3-dccf-4ca4-867a-a75748eb4bcc-bundle\") on node \"crc\" DevicePath \"\""
Dec 10 13:09:22 crc kubenswrapper[4921]: I1210 13:09:22.483776 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/147d6c35b26de94843aae2cc16def28bc6b9292bfcf7a2079ec0c049658wjps" event={"ID":"afe709e3-dccf-4ca4-867a-a75748eb4bcc","Type":"ContainerDied","Data":"6c0af18045888e749a581dd525edc33ac62a4914b19a398c322f7dd34068da6b"}
Dec 10 13:09:22 crc kubenswrapper[4921]: I1210 13:09:22.484120 4921 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6c0af18045888e749a581dd525edc33ac62a4914b19a398c322f7dd34068da6b"
Dec 10 13:09:22 crc kubenswrapper[4921]: I1210 13:09:22.483855 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/147d6c35b26de94843aae2cc16def28bc6b9292bfcf7a2079ec0c049658wjps"
Dec 10 13:09:28 crc kubenswrapper[4921]: I1210 13:09:28.918453 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-operator-9dffbf557-rz2h5"]
Dec 10 13:09:28 crc kubenswrapper[4921]: E1210 13:09:28.919026 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="afe709e3-dccf-4ca4-867a-a75748eb4bcc" containerName="extract"
Dec 10 13:09:28 crc kubenswrapper[4921]: I1210 13:09:28.919042 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="afe709e3-dccf-4ca4-867a-a75748eb4bcc" containerName="extract"
Dec 10 13:09:28 crc kubenswrapper[4921]: E1210 13:09:28.919055 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="afe709e3-dccf-4ca4-867a-a75748eb4bcc" containerName="pull"
Dec 10 13:09:28 crc kubenswrapper[4921]: I1210 13:09:28.919062 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="afe709e3-dccf-4ca4-867a-a75748eb4bcc" containerName="pull"
Dec 10 13:09:28 crc kubenswrapper[4921]: E1210 13:09:28.919071 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="afe709e3-dccf-4ca4-867a-a75748eb4bcc" containerName="util"
Dec 10 13:09:28 crc kubenswrapper[4921]: I1210 13:09:28.919078 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="afe709e3-dccf-4ca4-867a-a75748eb4bcc" containerName="util"
Dec 10 13:09:28 crc kubenswrapper[4921]: I1210 13:09:28.919193 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="afe709e3-dccf-4ca4-867a-a75748eb4bcc" containerName="extract"
Dec 10 13:09:28 crc kubenswrapper[4921]: I1210 13:09:28.919685 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-9dffbf557-rz2h5"
Dec 10 13:09:28 crc kubenswrapper[4921]: I1210 13:09:28.923471 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-operator-dockercfg-spq6k"
Dec 10 13:09:28 crc kubenswrapper[4921]: I1210 13:09:28.943942 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-9dffbf557-rz2h5"]
Dec 10 13:09:29 crc kubenswrapper[4921]: I1210 13:09:29.048016 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5lrg8\" (UniqueName: \"kubernetes.io/projected/5c70fefc-8487-4eb5-9e13-a583bb152dec-kube-api-access-5lrg8\") pod \"openstack-operator-controller-operator-9dffbf557-rz2h5\" (UID: \"5c70fefc-8487-4eb5-9e13-a583bb152dec\") " pod="openstack-operators/openstack-operator-controller-operator-9dffbf557-rz2h5"
Dec 10 13:09:29 crc kubenswrapper[4921]: I1210 13:09:29.149104 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5lrg8\" (UniqueName: \"kubernetes.io/projected/5c70fefc-8487-4eb5-9e13-a583bb152dec-kube-api-access-5lrg8\") pod \"openstack-operator-controller-operator-9dffbf557-rz2h5\" (UID: \"5c70fefc-8487-4eb5-9e13-a583bb152dec\") " pod="openstack-operators/openstack-operator-controller-operator-9dffbf557-rz2h5"
Dec 10 13:09:29 crc kubenswrapper[4921]: I1210 13:09:29.172876 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5lrg8\" (UniqueName: \"kubernetes.io/projected/5c70fefc-8487-4eb5-9e13-a583bb152dec-kube-api-access-5lrg8\") pod \"openstack-operator-controller-operator-9dffbf557-rz2h5\" (UID: \"5c70fefc-8487-4eb5-9e13-a583bb152dec\") " pod="openstack-operators/openstack-operator-controller-operator-9dffbf557-rz2h5"
Dec 10 13:09:29 crc kubenswrapper[4921]: I1210 13:09:29.237635 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-9dffbf557-rz2h5"
Dec 10 13:09:29 crc kubenswrapper[4921]: I1210 13:09:29.696629 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-9dffbf557-rz2h5"]
Dec 10 13:09:30 crc kubenswrapper[4921]: I1210 13:09:30.536003 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-9dffbf557-rz2h5" event={"ID":"5c70fefc-8487-4eb5-9e13-a583bb152dec","Type":"ContainerStarted","Data":"a03bb07eea7d14575b23a120ec37498e0e834530b192eb570eba5dd7a343cfda"}
Dec 10 13:09:34 crc kubenswrapper[4921]: I1210 13:09:34.562160 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-9dffbf557-rz2h5" event={"ID":"5c70fefc-8487-4eb5-9e13-a583bb152dec","Type":"ContainerStarted","Data":"2d4d13680d0d071dd90e31de34c809140c423dd7e78851f1b5300cad609179a4"}
Dec 10 13:09:34 crc kubenswrapper[4921]: I1210 13:09:34.562783 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-operator-9dffbf557-rz2h5"
Dec 10 13:09:34 crc kubenswrapper[4921]: I1210 13:09:34.586618 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-operator-9dffbf557-rz2h5" podStartSLOduration=2.600114042 podStartE2EDuration="6.586597182s" podCreationTimestamp="2025-12-10 13:09:28 +0000 UTC" firstStartedPulling="2025-12-10 13:09:29.710979294 +0000 UTC m=+766.927201218" lastFinishedPulling="2025-12-10 13:09:33.697462444 +0000 UTC m=+770.913684358" observedRunningTime="2025-12-10 13:09:34.583109778 +0000 UTC m=+771.799331712" watchObservedRunningTime="2025-12-10 13:09:34.586597182 +0000 UTC m=+771.802819116"
Dec 10 13:09:39 crc kubenswrapper[4921]: I1210 13:09:39.241819 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-operator-9dffbf557-rz2h5"
Dec 10 13:09:58 crc kubenswrapper[4921]: I1210 13:09:58.880634 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-2jpm7"]
Dec 10 13:09:58 crc kubenswrapper[4921]: I1210 13:09:58.883597 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-2jpm7"
Dec 10 13:09:58 crc kubenswrapper[4921]: I1210 13:09:58.888655 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/cinder-operator-controller-manager-6c677c69b-f8zhr"]
Dec 10 13:09:58 crc kubenswrapper[4921]: I1210 13:09:58.889566 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-6c677c69b-f8zhr"
Dec 10 13:09:58 crc kubenswrapper[4921]: I1210 13:09:58.891401 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-f5lbd"
Dec 10 13:09:58 crc kubenswrapper[4921]: I1210 13:09:58.891474 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-dktfd"
Dec 10 13:09:58 crc kubenswrapper[4921]: I1210 13:09:58.897576 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-2jpm7"]
Dec 10 13:09:58 crc kubenswrapper[4921]: I1210 13:09:58.910982 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-6c677c69b-f8zhr"]
Dec 10 13:09:58 crc kubenswrapper[4921]: I1210 13:09:58.935127 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/designate-operator-controller-manager-697fb699cf-vfzb5"]
Dec 10 13:09:58 crc kubenswrapper[4921]: I1210 13:09:58.936619 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-697fb699cf-vfzb5"
Dec 10 13:09:58 crc kubenswrapper[4921]: I1210 13:09:58.940859 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-29zz7"
Dec 10 13:09:58 crc kubenswrapper[4921]: I1210 13:09:58.941932 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-5697bb5779-xqfkp"]
Dec 10 13:09:58 crc kubenswrapper[4921]: I1210 13:09:58.942939 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-5697bb5779-xqfkp"
Dec 10 13:09:58 crc kubenswrapper[4921]: I1210 13:09:58.946099 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-jbx46"]
Dec 10 13:09:58 crc kubenswrapper[4921]: I1210 13:09:58.946434 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-t5mqg"
Dec 10 13:09:58 crc kubenswrapper[4921]: I1210 13:09:58.948259 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-jbx46"
Dec 10 13:09:58 crc kubenswrapper[4921]: I1210 13:09:58.950044 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-cb4jc"
Dec 10 13:09:58 crc kubenswrapper[4921]: I1210 13:09:58.974735 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-wnpbm"]
Dec 10 13:09:58 crc kubenswrapper[4921]: I1210 13:09:58.976014 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-wnpbm"
Dec 10 13:09:58 crc kubenswrapper[4921]: I1210 13:09:58.981157 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-vpds2"
Dec 10 13:09:58 crc kubenswrapper[4921]: I1210 13:09:58.989472 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-5697bb5779-xqfkp"]
Dec 10 13:09:58 crc kubenswrapper[4921]: I1210 13:09:58.994365 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-jbx46"]
Dec 10 13:09:58 crc kubenswrapper[4921]: I1210 13:09:58.994574 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j5rhs\" (UniqueName: \"kubernetes.io/projected/488ba5bf-b48b-42f3-ba24-eba12c38a5cb-kube-api-access-j5rhs\") pod \"cinder-operator-controller-manager-6c677c69b-f8zhr\" (UID: \"488ba5bf-b48b-42f3-ba24-eba12c38a5cb\") " pod="openstack-operators/cinder-operator-controller-manager-6c677c69b-f8zhr"
Dec 10 13:09:58 crc kubenswrapper[4921]: I1210 13:09:58.994635 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2ln6b\" (UniqueName: \"kubernetes.io/projected/dde58658-03d9-43dc-8fe5-4be3a607934b-kube-api-access-2ln6b\") pod \"barbican-operator-controller-manager-7d9dfd778-2jpm7\" (UID: \"dde58658-03d9-43dc-8fe5-4be3a607934b\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-2jpm7"
Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.018575 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-697fb699cf-vfzb5"]
Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.052081 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-78d48bff9d-7s9mj"]
Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.053068 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-7s9mj"
Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.059789 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert"
Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.059977 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-mjtdg"
Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.067928 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-wnpbm"]
Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.098447 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ironic-operator-controller-manager-967d97867-gxd4b"]
Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.099458 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/c4b5934f-4fde-47aa-a14e-fdb6f5fe7af1-cert\") pod \"infra-operator-controller-manager-78d48bff9d-7s9mj\" (UID: \"c4b5934f-4fde-47aa-a14e-fdb6f5fe7af1\") " pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-7s9mj"
Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.099518 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9t4lx\" (UniqueName: \"kubernetes.io/projected/c4b5934f-4fde-47aa-a14e-fdb6f5fe7af1-kube-api-access-9t4lx\") pod \"infra-operator-controller-manager-78d48bff9d-7s9mj\" (UID: \"c4b5934f-4fde-47aa-a14e-fdb6f5fe7af1\") " pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-7s9mj"
Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.099546 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2ln6b\" (UniqueName: \"kubernetes.io/projected/dde58658-03d9-43dc-8fe5-4be3a607934b-kube-api-access-2ln6b\") pod \"barbican-operator-controller-manager-7d9dfd778-2jpm7\" (UID: \"dde58658-03d9-43dc-8fe5-4be3a607934b\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-2jpm7"
Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.099591 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-njkqv\" (UniqueName: \"kubernetes.io/projected/32c622c7-cf8b-4b25-836e-c13f5c35dde1-kube-api-access-njkqv\") pod \"glance-operator-controller-manager-5697bb5779-xqfkp\" (UID: \"32c622c7-cf8b-4b25-836e-c13f5c35dde1\") " pod="openstack-operators/glance-operator-controller-manager-5697bb5779-xqfkp"
Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.099615 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-85r2l\" (UniqueName: \"kubernetes.io/projected/b297d26d-5199-4ebf-b8ad-5ca6f5e53e86-kube-api-access-85r2l\") pod \"designate-operator-controller-manager-697fb699cf-vfzb5\" (UID: \"b297d26d-5199-4ebf-b8ad-5ca6f5e53e86\") " pod="openstack-operators/designate-operator-controller-manager-697fb699cf-vfzb5"
Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.099634 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-84ch7\" (UniqueName: \"kubernetes.io/projected/1ac6c721-b338-46f7-943e-63f5db2bd354-kube-api-access-84ch7\") pod \"heat-operator-controller-manager-5f64f6f8bb-jbx46\" (UID: \"1ac6c721-b338-46f7-943e-63f5db2bd354\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-jbx46"
\"heat-operator-controller-manager-5f64f6f8bb-jbx46\" (UID: \"1ac6c721-b338-46f7-943e-63f5db2bd354\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-jbx46" Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.099655 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bpwlh\" (UniqueName: \"kubernetes.io/projected/4f0cbb29-bf13-4073-8c9d-28da25a1fbba-kube-api-access-bpwlh\") pod \"horizon-operator-controller-manager-68c6d99b8f-wnpbm\" (UID: \"4f0cbb29-bf13-4073-8c9d-28da25a1fbba\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-wnpbm" Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.099693 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j5rhs\" (UniqueName: \"kubernetes.io/projected/488ba5bf-b48b-42f3-ba24-eba12c38a5cb-kube-api-access-j5rhs\") pod \"cinder-operator-controller-manager-6c677c69b-f8zhr\" (UID: \"488ba5bf-b48b-42f3-ba24-eba12c38a5cb\") " pod="openstack-operators/cinder-operator-controller-manager-6c677c69b-f8zhr" Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.099988 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-967d97867-gxd4b" Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.111105 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-78d48bff9d-7s9mj"] Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.112348 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-9jcdm" Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.138875 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-967d97867-gxd4b"] Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.144919 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j5rhs\" (UniqueName: \"kubernetes.io/projected/488ba5bf-b48b-42f3-ba24-eba12c38a5cb-kube-api-access-j5rhs\") pod \"cinder-operator-controller-manager-6c677c69b-f8zhr\" (UID: \"488ba5bf-b48b-42f3-ba24-eba12c38a5cb\") " pod="openstack-operators/cinder-operator-controller-manager-6c677c69b-f8zhr" Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.227349 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-84ch7\" (UniqueName: \"kubernetes.io/projected/1ac6c721-b338-46f7-943e-63f5db2bd354-kube-api-access-84ch7\") pod \"heat-operator-controller-manager-5f64f6f8bb-jbx46\" (UID: \"1ac6c721-b338-46f7-943e-63f5db2bd354\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-jbx46" Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.227384 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bpwlh\" (UniqueName: \"kubernetes.io/projected/4f0cbb29-bf13-4073-8c9d-28da25a1fbba-kube-api-access-bpwlh\") pod \"horizon-operator-controller-manager-68c6d99b8f-wnpbm\" (UID: \"4f0cbb29-bf13-4073-8c9d-28da25a1fbba\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-wnpbm" Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.227425 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vjk95\" (UniqueName: 
\"kubernetes.io/projected/5c6aaa2e-a82b-4d95-b7fc-bfa5eee026c8-kube-api-access-vjk95\") pod \"ironic-operator-controller-manager-967d97867-gxd4b\" (UID: \"5c6aaa2e-a82b-4d95-b7fc-bfa5eee026c8\") " pod="openstack-operators/ironic-operator-controller-manager-967d97867-gxd4b" Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.227485 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/c4b5934f-4fde-47aa-a14e-fdb6f5fe7af1-cert\") pod \"infra-operator-controller-manager-78d48bff9d-7s9mj\" (UID: \"c4b5934f-4fde-47aa-a14e-fdb6f5fe7af1\") " pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-7s9mj" Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.227533 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9t4lx\" (UniqueName: \"kubernetes.io/projected/c4b5934f-4fde-47aa-a14e-fdb6f5fe7af1-kube-api-access-9t4lx\") pod \"infra-operator-controller-manager-78d48bff9d-7s9mj\" (UID: \"c4b5934f-4fde-47aa-a14e-fdb6f5fe7af1\") " pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-7s9mj" Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.227593 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-njkqv\" (UniqueName: \"kubernetes.io/projected/32c622c7-cf8b-4b25-836e-c13f5c35dde1-kube-api-access-njkqv\") pod \"glance-operator-controller-manager-5697bb5779-xqfkp\" (UID: \"32c622c7-cf8b-4b25-836e-c13f5c35dde1\") " pod="openstack-operators/glance-operator-controller-manager-5697bb5779-xqfkp" Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.227614 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-85r2l\" (UniqueName: \"kubernetes.io/projected/b297d26d-5199-4ebf-b8ad-5ca6f5e53e86-kube-api-access-85r2l\") pod \"designate-operator-controller-manager-697fb699cf-vfzb5\" (UID: \"b297d26d-5199-4ebf-b8ad-5ca6f5e53e86\") " pod="openstack-operators/designate-operator-controller-manager-697fb699cf-vfzb5" Dec 10 13:09:59 crc kubenswrapper[4921]: E1210 13:09:59.228248 4921 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 10 13:09:59 crc kubenswrapper[4921]: E1210 13:09:59.228282 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c4b5934f-4fde-47aa-a14e-fdb6f5fe7af1-cert podName:c4b5934f-4fde-47aa-a14e-fdb6f5fe7af1 nodeName:}" failed. No retries permitted until 2025-12-10 13:09:59.728269129 +0000 UTC m=+796.944491043 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/c4b5934f-4fde-47aa-a14e-fdb6f5fe7af1-cert") pod "infra-operator-controller-manager-78d48bff9d-7s9mj" (UID: "c4b5934f-4fde-47aa-a14e-fdb6f5fe7af1") : secret "infra-operator-webhook-server-cert" not found Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.228908 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2ln6b\" (UniqueName: \"kubernetes.io/projected/dde58658-03d9-43dc-8fe5-4be3a607934b-kube-api-access-2ln6b\") pod \"barbican-operator-controller-manager-7d9dfd778-2jpm7\" (UID: \"dde58658-03d9-43dc-8fe5-4be3a607934b\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-2jpm7" Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.229014 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-6c677c69b-f8zhr" Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.241676 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-controller-manager-5b5fd79c9c-q5sxl"] Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.242648 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-5b5fd79c9c-q5sxl" Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.246315 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-d6dxt" Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.269818 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7765d96ddf-mf85l"] Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.271097 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bpwlh\" (UniqueName: \"kubernetes.io/projected/4f0cbb29-bf13-4073-8c9d-28da25a1fbba-kube-api-access-bpwlh\") pod \"horizon-operator-controller-manager-68c6d99b8f-wnpbm\" (UID: \"4f0cbb29-bf13-4073-8c9d-28da25a1fbba\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-wnpbm" Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.273581 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-mf85l" Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.275556 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-84ch7\" (UniqueName: \"kubernetes.io/projected/1ac6c721-b338-46f7-943e-63f5db2bd354-kube-api-access-84ch7\") pod \"heat-operator-controller-manager-5f64f6f8bb-jbx46\" (UID: \"1ac6c721-b338-46f7-943e-63f5db2bd354\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-jbx46" Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.284954 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-85r2l\" (UniqueName: \"kubernetes.io/projected/b297d26d-5199-4ebf-b8ad-5ca6f5e53e86-kube-api-access-85r2l\") pod \"designate-operator-controller-manager-697fb699cf-vfzb5\" (UID: \"b297d26d-5199-4ebf-b8ad-5ca6f5e53e86\") " pod="openstack-operators/designate-operator-controller-manager-697fb699cf-vfzb5" Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.285219 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-9qprs" Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.292757 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-njkqv\" (UniqueName: \"kubernetes.io/projected/32c622c7-cf8b-4b25-836e-c13f5c35dde1-kube-api-access-njkqv\") pod \"glance-operator-controller-manager-5697bb5779-xqfkp\" (UID: \"32c622c7-cf8b-4b25-836e-c13f5c35dde1\") " pod="openstack-operators/glance-operator-controller-manager-5697bb5779-xqfkp" Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.308427 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-5697bb5779-xqfkp" Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.320877 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-jbx46" Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.325078 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-xzrb8"] Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.325693 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9t4lx\" (UniqueName: \"kubernetes.io/projected/c4b5934f-4fde-47aa-a14e-fdb6f5fe7af1-kube-api-access-9t4lx\") pod \"infra-operator-controller-manager-78d48bff9d-7s9mj\" (UID: \"c4b5934f-4fde-47aa-a14e-fdb6f5fe7af1\") " pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-7s9mj" Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.326150 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-xzrb8" Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.328732 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vjk95\" (UniqueName: \"kubernetes.io/projected/5c6aaa2e-a82b-4d95-b7fc-bfa5eee026c8-kube-api-access-vjk95\") pod \"ironic-operator-controller-manager-967d97867-gxd4b\" (UID: \"5c6aaa2e-a82b-4d95-b7fc-bfa5eee026c8\") " pod="openstack-operators/ironic-operator-controller-manager-967d97867-gxd4b" Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.329382 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-wnpbm" Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.334694 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-bn9bb" Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.374146 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7765d96ddf-mf85l"] Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.395150 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-79c8c4686c-94t86"] Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.402373 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-79c8c4686c-94t86" Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.404543 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vjk95\" (UniqueName: \"kubernetes.io/projected/5c6aaa2e-a82b-4d95-b7fc-bfa5eee026c8-kube-api-access-vjk95\") pod \"ironic-operator-controller-manager-967d97867-gxd4b\" (UID: \"5c6aaa2e-a82b-4d95-b7fc-bfa5eee026c8\") " pod="openstack-operators/ironic-operator-controller-manager-967d97867-gxd4b" Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.409482 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-5b5fd79c9c-q5sxl"] Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.414969 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-x7fhv" Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.433177 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8nrj9\" (UniqueName: \"kubernetes.io/projected/22100473-2036-47d3-846e-b9e351b7d7e1-kube-api-access-8nrj9\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-xzrb8\" (UID: \"22100473-2036-47d3-846e-b9e351b7d7e1\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-xzrb8" Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.433254 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ksmf6\" (UniqueName: \"kubernetes.io/projected/30d38f66-817f-4412-8e03-9c55d0417ace-kube-api-access-ksmf6\") pod \"manila-operator-controller-manager-5b5fd79c9c-q5sxl\" (UID: \"30d38f66-817f-4412-8e03-9c55d0417ace\") " pod="openstack-operators/manila-operator-controller-manager-5b5fd79c9c-q5sxl" Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.433289 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ssghd\" (UniqueName: \"kubernetes.io/projected/83057698-5071-4487-9ed1-3649fe298d00-kube-api-access-ssghd\") pod \"keystone-operator-controller-manager-7765d96ddf-mf85l\" (UID: \"83057698-5071-4487-9ed1-3649fe298d00\") " pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-mf85l" Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.444371 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-967d97867-gxd4b" Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.473122 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-xzrb8"] Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.481654 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-79c8c4686c-94t86"] Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.485240 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-kgtwn"] Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.486193 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-kgtwn" Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.492680 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-jk685" Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.493477 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-8zlrp"] Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.494409 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-998648c74-8zlrp" Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.497689 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-rljjk" Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.510177 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-2jpm7" Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.518875 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-kgtwn"] Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.540939 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-8zlrp"] Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.543378 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ssghd\" (UniqueName: \"kubernetes.io/projected/83057698-5071-4487-9ed1-3649fe298d00-kube-api-access-ssghd\") pod \"keystone-operator-controller-manager-7765d96ddf-mf85l\" (UID: \"83057698-5071-4487-9ed1-3649fe298d00\") " pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-mf85l" Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.543659 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8nrj9\" (UniqueName: \"kubernetes.io/projected/22100473-2036-47d3-846e-b9e351b7d7e1-kube-api-access-8nrj9\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-xzrb8\" (UID: \"22100473-2036-47d3-846e-b9e351b7d7e1\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-xzrb8" Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.543788 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zq5l5\" (UniqueName: \"kubernetes.io/projected/a0e4b9fa-671d-45a9-93e9-078eb23843db-kube-api-access-zq5l5\") pod \"mariadb-operator-controller-manager-79c8c4686c-94t86\" (UID: \"a0e4b9fa-671d-45a9-93e9-078eb23843db\") " pod="openstack-operators/mariadb-operator-controller-manager-79c8c4686c-94t86" Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.543894 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ksmf6\" (UniqueName: \"kubernetes.io/projected/30d38f66-817f-4412-8e03-9c55d0417ace-kube-api-access-ksmf6\") pod \"manila-operator-controller-manager-5b5fd79c9c-q5sxl\" (UID: \"30d38f66-817f-4412-8e03-9c55d0417ace\") " pod="openstack-operators/manila-operator-controller-manager-5b5fd79c9c-q5sxl" Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.544018 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"kube-api-access-r47wq\" (UniqueName: \"kubernetes.io/projected/e452d8aa-f7b0-4bbe-9ee2-0f54854b0bad-kube-api-access-r47wq\") pod \"nova-operator-controller-manager-697bc559fc-kgtwn\" (UID: \"e452d8aa-f7b0-4bbe-9ee2-0f54854b0bad\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-kgtwn" Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.546904 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-84b575879fkj985"] Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.547859 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879fkj985" Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.550008 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-rglqw" Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.550154 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert" Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.560956 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-pdm9q"] Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.562092 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-pdm9q" Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.566282 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-2lwnz" Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.566670 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ssghd\" (UniqueName: \"kubernetes.io/projected/83057698-5071-4487-9ed1-3649fe298d00-kube-api-access-ssghd\") pod \"keystone-operator-controller-manager-7765d96ddf-mf85l\" (UID: \"83057698-5071-4487-9ed1-3649fe298d00\") " pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-mf85l" Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.570587 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-697fb699cf-vfzb5" Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.572121 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-84b575879fkj985"] Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.582931 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ksmf6\" (UniqueName: \"kubernetes.io/projected/30d38f66-817f-4412-8e03-9c55d0417ace-kube-api-access-ksmf6\") pod \"manila-operator-controller-manager-5b5fd79c9c-q5sxl\" (UID: \"30d38f66-817f-4412-8e03-9c55d0417ace\") " pod="openstack-operators/manila-operator-controller-manager-5b5fd79c9c-q5sxl" Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.587058 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8nrj9\" (UniqueName: \"kubernetes.io/projected/22100473-2036-47d3-846e-b9e351b7d7e1-kube-api-access-8nrj9\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-xzrb8\" (UID: \"22100473-2036-47d3-846e-b9e351b7d7e1\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-xzrb8" Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.603107 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-9d58d64bc-8879b"] Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.607540 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-9d58d64bc-8879b" Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.609735 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-v4ppq" Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.623344 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-pdm9q"] Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.638447 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-5b5fd79c9c-q5sxl" Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.643325 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-hbpss"] Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.646198 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-78f8948974-hbpss" Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.652909 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/feac9e56-73ad-4870-8306-0789acfe8a8f-cert\") pod \"openstack-baremetal-operator-controller-manager-84b575879fkj985\" (UID: \"feac9e56-73ad-4870-8306-0789acfe8a8f\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879fkj985" Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.652962 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zq5l5\" (UniqueName: \"kubernetes.io/projected/a0e4b9fa-671d-45a9-93e9-078eb23843db-kube-api-access-zq5l5\") pod \"mariadb-operator-controller-manager-79c8c4686c-94t86\" (UID: \"a0e4b9fa-671d-45a9-93e9-078eb23843db\") " pod="openstack-operators/mariadb-operator-controller-manager-79c8c4686c-94t86" Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.652997 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lgllx\" (UniqueName: \"kubernetes.io/projected/cdc476bb-a407-403e-9bbe-e2f62e0ce23b-kube-api-access-lgllx\") pod \"placement-operator-controller-manager-78f8948974-hbpss\" (UID: \"cdc476bb-a407-403e-9bbe-e2f62e0ce23b\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-hbpss" Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.653020 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r47wq\" (UniqueName: \"kubernetes.io/projected/e452d8aa-f7b0-4bbe-9ee2-0f54854b0bad-kube-api-access-r47wq\") pod \"nova-operator-controller-manager-697bc559fc-kgtwn\" (UID: \"e452d8aa-f7b0-4bbe-9ee2-0f54854b0bad\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-kgtwn" Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.653040 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m5bmk\" (UniqueName: \"kubernetes.io/projected/feac9e56-73ad-4870-8306-0789acfe8a8f-kube-api-access-m5bmk\") pod \"openstack-baremetal-operator-controller-manager-84b575879fkj985\" (UID: \"feac9e56-73ad-4870-8306-0789acfe8a8f\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879fkj985" Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.653060 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-stsh4\" (UniqueName: \"kubernetes.io/projected/5c3dc67e-e2a4-426f-b365-d325af35b1b6-kube-api-access-stsh4\") pod \"swift-operator-controller-manager-9d58d64bc-8879b\" (UID: \"5c3dc67e-e2a4-426f-b365-d325af35b1b6\") " pod="openstack-operators/swift-operator-controller-manager-9d58d64bc-8879b" Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.653085 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-28p47\" (UniqueName: \"kubernetes.io/projected/fc42a438-92ea-4f71-aeaa-62d388327002-kube-api-access-28p47\") pod \"ovn-operator-controller-manager-b6456fdb6-pdm9q\" (UID: \"fc42a438-92ea-4f71-aeaa-62d388327002\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-pdm9q" Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.653116 4921 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5nmbx\" (UniqueName: \"kubernetes.io/projected/c261f893-dc59-43ba-8a28-09528971bfb1-kube-api-access-5nmbx\") pod \"octavia-operator-controller-manager-998648c74-8zlrp\" (UID: \"c261f893-dc59-43ba-8a28-09528971bfb1\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-8zlrp" Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.654507 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-mf85l" Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.662534 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-9d58d64bc-8879b"] Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.665312 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-7w7vz" Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.667583 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-hbpss"] Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.674556 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-jkgbt"] Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.683055 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-58d5ff84df-p7nv4"] Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.684005 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-58d5ff84df-p7nv4" Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.686025 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5854674fcc-jkgbt" Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.695242 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r47wq\" (UniqueName: \"kubernetes.io/projected/e452d8aa-f7b0-4bbe-9ee2-0f54854b0bad-kube-api-access-r47wq\") pod \"nova-operator-controller-manager-697bc559fc-kgtwn\" (UID: \"e452d8aa-f7b0-4bbe-9ee2-0f54854b0bad\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-kgtwn" Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.705645 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-xzrb8" Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.713038 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zq5l5\" (UniqueName: \"kubernetes.io/projected/a0e4b9fa-671d-45a9-93e9-078eb23843db-kube-api-access-zq5l5\") pod \"mariadb-operator-controller-manager-79c8c4686c-94t86\" (UID: \"a0e4b9fa-671d-45a9-93e9-078eb23843db\") " pod="openstack-operators/mariadb-operator-controller-manager-79c8c4686c-94t86" Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.713096 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-jkgbt"] Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.716048 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-58d5ff84df-p7nv4"] Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.755801 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-pwhz5" Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.757276 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-24wfs" Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.771682 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-75944c9b7-x4zn4"] Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.773140 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-28p47\" (UniqueName: \"kubernetes.io/projected/fc42a438-92ea-4f71-aeaa-62d388327002-kube-api-access-28p47\") pod \"ovn-operator-controller-manager-b6456fdb6-pdm9q\" (UID: \"fc42a438-92ea-4f71-aeaa-62d388327002\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-pdm9q" Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.773230 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5nmbx\" (UniqueName: \"kubernetes.io/projected/c261f893-dc59-43ba-8a28-09528971bfb1-kube-api-access-5nmbx\") pod \"octavia-operator-controller-manager-998648c74-8zlrp\" (UID: \"c261f893-dc59-43ba-8a28-09528971bfb1\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-8zlrp" Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.773310 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/feac9e56-73ad-4870-8306-0789acfe8a8f-cert\") pod \"openstack-baremetal-operator-controller-manager-84b575879fkj985\" (UID: \"feac9e56-73ad-4870-8306-0789acfe8a8f\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879fkj985" Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.773417 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/c4b5934f-4fde-47aa-a14e-fdb6f5fe7af1-cert\") pod \"infra-operator-controller-manager-78d48bff9d-7s9mj\" (UID: \"c4b5934f-4fde-47aa-a14e-fdb6f5fe7af1\") " pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-7s9mj" Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.773455 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lgllx\" (UniqueName: 
\"kubernetes.io/projected/cdc476bb-a407-403e-9bbe-e2f62e0ce23b-kube-api-access-lgllx\") pod \"placement-operator-controller-manager-78f8948974-hbpss\" (UID: \"cdc476bb-a407-403e-9bbe-e2f62e0ce23b\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-hbpss" Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.773820 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m5bmk\" (UniqueName: \"kubernetes.io/projected/feac9e56-73ad-4870-8306-0789acfe8a8f-kube-api-access-m5bmk\") pod \"openstack-baremetal-operator-controller-manager-84b575879fkj985\" (UID: \"feac9e56-73ad-4870-8306-0789acfe8a8f\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879fkj985" Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.773857 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-stsh4\" (UniqueName: \"kubernetes.io/projected/5c3dc67e-e2a4-426f-b365-d325af35b1b6-kube-api-access-stsh4\") pod \"swift-operator-controller-manager-9d58d64bc-8879b\" (UID: \"5c3dc67e-e2a4-426f-b365-d325af35b1b6\") " pod="openstack-operators/swift-operator-controller-manager-9d58d64bc-8879b" Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.775701 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-75944c9b7-x4zn4" Dec 10 13:09:59 crc kubenswrapper[4921]: E1210 13:09:59.777727 4921 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 10 13:09:59 crc kubenswrapper[4921]: E1210 13:09:59.777951 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/feac9e56-73ad-4870-8306-0789acfe8a8f-cert podName:feac9e56-73ad-4870-8306-0789acfe8a8f nodeName:}" failed. No retries permitted until 2025-12-10 13:10:00.277782688 +0000 UTC m=+797.494004612 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/feac9e56-73ad-4870-8306-0789acfe8a8f-cert") pod "openstack-baremetal-operator-controller-manager-84b575879fkj985" (UID: "feac9e56-73ad-4870-8306-0789acfe8a8f") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 10 13:09:59 crc kubenswrapper[4921]: E1210 13:09:59.779313 4921 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 10 13:09:59 crc kubenswrapper[4921]: E1210 13:09:59.779379 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c4b5934f-4fde-47aa-a14e-fdb6f5fe7af1-cert podName:c4b5934f-4fde-47aa-a14e-fdb6f5fe7af1 nodeName:}" failed. No retries permitted until 2025-12-10 13:10:00.77936424 +0000 UTC m=+797.995586164 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/c4b5934f-4fde-47aa-a14e-fdb6f5fe7af1-cert") pod "infra-operator-controller-manager-78d48bff9d-7s9mj" (UID: "c4b5934f-4fde-47aa-a14e-fdb6f5fe7af1") : secret "infra-operator-webhook-server-cert" not found Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.793029 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-79c8c4686c-94t86" Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.797156 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-f85xg" Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.811088 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-75944c9b7-x4zn4"] Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.812806 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m5bmk\" (UniqueName: \"kubernetes.io/projected/feac9e56-73ad-4870-8306-0789acfe8a8f-kube-api-access-m5bmk\") pod \"openstack-baremetal-operator-controller-manager-84b575879fkj985\" (UID: \"feac9e56-73ad-4870-8306-0789acfe8a8f\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879fkj985" Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.820176 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-stsh4\" (UniqueName: \"kubernetes.io/projected/5c3dc67e-e2a4-426f-b365-d325af35b1b6-kube-api-access-stsh4\") pod \"swift-operator-controller-manager-9d58d64bc-8879b\" (UID: \"5c3dc67e-e2a4-426f-b365-d325af35b1b6\") " pod="openstack-operators/swift-operator-controller-manager-9d58d64bc-8879b" Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.862699 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-kgtwn" Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.866281 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5nmbx\" (UniqueName: \"kubernetes.io/projected/c261f893-dc59-43ba-8a28-09528971bfb1-kube-api-access-5nmbx\") pod \"octavia-operator-controller-manager-998648c74-8zlrp\" (UID: \"c261f893-dc59-43ba-8a28-09528971bfb1\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-8zlrp" Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.881009 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-28p47\" (UniqueName: \"kubernetes.io/projected/fc42a438-92ea-4f71-aeaa-62d388327002-kube-api-access-28p47\") pod \"ovn-operator-controller-manager-b6456fdb6-pdm9q\" (UID: \"fc42a438-92ea-4f71-aeaa-62d388327002\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-pdm9q" Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.890750 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lgllx\" (UniqueName: \"kubernetes.io/projected/cdc476bb-a407-403e-9bbe-e2f62e0ce23b-kube-api-access-lgllx\") pod \"placement-operator-controller-manager-78f8948974-hbpss\" (UID: \"cdc476bb-a407-403e-9bbe-e2f62e0ce23b\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-hbpss" Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.894549 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tfq9t\" (UniqueName: \"kubernetes.io/projected/c1a2fb9e-5927-4ebf-a3f1-a13564f7c26e-kube-api-access-tfq9t\") pod \"telemetry-operator-controller-manager-58d5ff84df-p7nv4\" (UID: \"c1a2fb9e-5927-4ebf-a3f1-a13564f7c26e\") " pod="openstack-operators/telemetry-operator-controller-manager-58d5ff84df-p7nv4" Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.894796 4921 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h9dk4\" (UniqueName: \"kubernetes.io/projected/532d1487-1112-4341-a787-1981d7093054-kube-api-access-h9dk4\") pod \"test-operator-controller-manager-5854674fcc-jkgbt\" (UID: \"532d1487-1112-4341-a787-1981d7093054\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-jkgbt" Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.933555 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-pdm9q" Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.980464 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-manager-7bf4f7754c-bzqjz"] Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.983647 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-7bf4f7754c-bzqjz" Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.986877 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"metrics-server-cert" Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.987419 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-689gt" Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.987586 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Dec 10 13:09:59 crc kubenswrapper[4921]: I1210 13:09:59.989209 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-9d58d64bc-8879b" Dec 10 13:10:00 crc kubenswrapper[4921]: I1210 13:10:00.007465 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tfq9t\" (UniqueName: \"kubernetes.io/projected/c1a2fb9e-5927-4ebf-a3f1-a13564f7c26e-kube-api-access-tfq9t\") pod \"telemetry-operator-controller-manager-58d5ff84df-p7nv4\" (UID: \"c1a2fb9e-5927-4ebf-a3f1-a13564f7c26e\") " pod="openstack-operators/telemetry-operator-controller-manager-58d5ff84df-p7nv4" Dec 10 13:10:00 crc kubenswrapper[4921]: I1210 13:10:00.007564 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sfztm\" (UniqueName: \"kubernetes.io/projected/d838a4f0-a117-4807-aca0-8bc00b6ad6f1-kube-api-access-sfztm\") pod \"watcher-operator-controller-manager-75944c9b7-x4zn4\" (UID: \"d838a4f0-a117-4807-aca0-8bc00b6ad6f1\") " pod="openstack-operators/watcher-operator-controller-manager-75944c9b7-x4zn4" Dec 10 13:10:00 crc kubenswrapper[4921]: I1210 13:10:00.007631 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h9dk4\" (UniqueName: \"kubernetes.io/projected/532d1487-1112-4341-a787-1981d7093054-kube-api-access-h9dk4\") pod \"test-operator-controller-manager-5854674fcc-jkgbt\" (UID: \"532d1487-1112-4341-a787-1981d7093054\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-jkgbt" Dec 10 13:10:00 crc kubenswrapper[4921]: I1210 13:10:00.019043 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-7bf4f7754c-bzqjz"] Dec 10 13:10:00 crc kubenswrapper[4921]: I1210 13:10:00.032382 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-tfq9t\" (UniqueName: \"kubernetes.io/projected/c1a2fb9e-5927-4ebf-a3f1-a13564f7c26e-kube-api-access-tfq9t\") pod \"telemetry-operator-controller-manager-58d5ff84df-p7nv4\" (UID: \"c1a2fb9e-5927-4ebf-a3f1-a13564f7c26e\") " pod="openstack-operators/telemetry-operator-controller-manager-58d5ff84df-p7nv4" Dec 10 13:10:00 crc kubenswrapper[4921]: I1210 13:10:00.041018 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h9dk4\" (UniqueName: \"kubernetes.io/projected/532d1487-1112-4341-a787-1981d7093054-kube-api-access-h9dk4\") pod \"test-operator-controller-manager-5854674fcc-jkgbt\" (UID: \"532d1487-1112-4341-a787-1981d7093054\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-jkgbt" Dec 10 13:10:00 crc kubenswrapper[4921]: I1210 13:10:00.071754 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-h76ts"] Dec 10 13:10:00 crc kubenswrapper[4921]: I1210 13:10:00.072693 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-h76ts" Dec 10 13:10:00 crc kubenswrapper[4921]: I1210 13:10:00.075049 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-controller-manager-dockercfg-tjphb" Dec 10 13:10:00 crc kubenswrapper[4921]: I1210 13:10:00.079489 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-h76ts"] Dec 10 13:10:00 crc kubenswrapper[4921]: I1210 13:10:00.082811 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-78f8948974-hbpss" Dec 10 13:10:00 crc kubenswrapper[4921]: I1210 13:10:00.103292 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-6c677c69b-f8zhr"] Dec 10 13:10:00 crc kubenswrapper[4921]: I1210 13:10:00.110438 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-58d5ff84df-p7nv4" Dec 10 13:10:00 crc kubenswrapper[4921]: I1210 13:10:00.111274 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/597fc39b-b10d-4b12-af6e-ce35966daa75-metrics-certs\") pod \"openstack-operator-controller-manager-7bf4f7754c-bzqjz\" (UID: \"597fc39b-b10d-4b12-af6e-ce35966daa75\") " pod="openstack-operators/openstack-operator-controller-manager-7bf4f7754c-bzqjz" Dec 10 13:10:00 crc kubenswrapper[4921]: I1210 13:10:00.111317 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mkgmk\" (UniqueName: \"kubernetes.io/projected/597fc39b-b10d-4b12-af6e-ce35966daa75-kube-api-access-mkgmk\") pod \"openstack-operator-controller-manager-7bf4f7754c-bzqjz\" (UID: \"597fc39b-b10d-4b12-af6e-ce35966daa75\") " pod="openstack-operators/openstack-operator-controller-manager-7bf4f7754c-bzqjz" Dec 10 13:10:00 crc kubenswrapper[4921]: I1210 13:10:00.111339 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sfztm\" (UniqueName: \"kubernetes.io/projected/d838a4f0-a117-4807-aca0-8bc00b6ad6f1-kube-api-access-sfztm\") pod \"watcher-operator-controller-manager-75944c9b7-x4zn4\" (UID: \"d838a4f0-a117-4807-aca0-8bc00b6ad6f1\") " pod="openstack-operators/watcher-operator-controller-manager-75944c9b7-x4zn4" Dec 10 13:10:00 crc kubenswrapper[4921]: I1210 13:10:00.111369 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/597fc39b-b10d-4b12-af6e-ce35966daa75-webhook-certs\") pod \"openstack-operator-controller-manager-7bf4f7754c-bzqjz\" (UID: \"597fc39b-b10d-4b12-af6e-ce35966daa75\") " pod="openstack-operators/openstack-operator-controller-manager-7bf4f7754c-bzqjz" Dec 10 13:10:00 crc kubenswrapper[4921]: I1210 13:10:00.151481 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-998648c74-8zlrp" Dec 10 13:10:00 crc kubenswrapper[4921]: I1210 13:10:00.154631 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sfztm\" (UniqueName: \"kubernetes.io/projected/d838a4f0-a117-4807-aca0-8bc00b6ad6f1-kube-api-access-sfztm\") pod \"watcher-operator-controller-manager-75944c9b7-x4zn4\" (UID: \"d838a4f0-a117-4807-aca0-8bc00b6ad6f1\") " pod="openstack-operators/watcher-operator-controller-manager-75944c9b7-x4zn4" Dec 10 13:10:00 crc kubenswrapper[4921]: I1210 13:10:00.176133 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5854674fcc-jkgbt" Dec 10 13:10:00 crc kubenswrapper[4921]: I1210 13:10:00.205070 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-5697bb5779-xqfkp"] Dec 10 13:10:00 crc kubenswrapper[4921]: I1210 13:10:00.217469 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/597fc39b-b10d-4b12-af6e-ce35966daa75-metrics-certs\") pod \"openstack-operator-controller-manager-7bf4f7754c-bzqjz\" (UID: \"597fc39b-b10d-4b12-af6e-ce35966daa75\") " pod="openstack-operators/openstack-operator-controller-manager-7bf4f7754c-bzqjz" Dec 10 13:10:00 crc kubenswrapper[4921]: I1210 13:10:00.217526 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mkgmk\" (UniqueName: \"kubernetes.io/projected/597fc39b-b10d-4b12-af6e-ce35966daa75-kube-api-access-mkgmk\") pod \"openstack-operator-controller-manager-7bf4f7754c-bzqjz\" (UID: \"597fc39b-b10d-4b12-af6e-ce35966daa75\") " pod="openstack-operators/openstack-operator-controller-manager-7bf4f7754c-bzqjz" Dec 10 13:10:00 crc kubenswrapper[4921]: I1210 13:10:00.217560 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/597fc39b-b10d-4b12-af6e-ce35966daa75-webhook-certs\") pod \"openstack-operator-controller-manager-7bf4f7754c-bzqjz\" (UID: \"597fc39b-b10d-4b12-af6e-ce35966daa75\") " pod="openstack-operators/openstack-operator-controller-manager-7bf4f7754c-bzqjz" Dec 10 13:10:00 crc kubenswrapper[4921]: I1210 13:10:00.217607 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mfg85\" (UniqueName: \"kubernetes.io/projected/0c17ea5f-4baf-427a-a2ba-106bb7248194-kube-api-access-mfg85\") pod \"rabbitmq-cluster-operator-manager-668c99d594-h76ts\" (UID: \"0c17ea5f-4baf-427a-a2ba-106bb7248194\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-h76ts" Dec 10 13:10:00 crc kubenswrapper[4921]: E1210 13:10:00.217808 4921 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 10 13:10:00 crc kubenswrapper[4921]: E1210 13:10:00.217857 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/597fc39b-b10d-4b12-af6e-ce35966daa75-metrics-certs podName:597fc39b-b10d-4b12-af6e-ce35966daa75 nodeName:}" failed. No retries permitted until 2025-12-10 13:10:00.717841991 +0000 UTC m=+797.934063915 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/597fc39b-b10d-4b12-af6e-ce35966daa75-metrics-certs") pod "openstack-operator-controller-manager-7bf4f7754c-bzqjz" (UID: "597fc39b-b10d-4b12-af6e-ce35966daa75") : secret "metrics-server-cert" not found Dec 10 13:10:00 crc kubenswrapper[4921]: E1210 13:10:00.221743 4921 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 10 13:10:00 crc kubenswrapper[4921]: E1210 13:10:00.221798 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/597fc39b-b10d-4b12-af6e-ce35966daa75-webhook-certs podName:597fc39b-b10d-4b12-af6e-ce35966daa75 nodeName:}" failed. No retries permitted until 2025-12-10 13:10:00.721785136 +0000 UTC m=+797.938007060 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/597fc39b-b10d-4b12-af6e-ce35966daa75-webhook-certs") pod "openstack-operator-controller-manager-7bf4f7754c-bzqjz" (UID: "597fc39b-b10d-4b12-af6e-ce35966daa75") : secret "webhook-server-cert" not found Dec 10 13:10:00 crc kubenswrapper[4921]: I1210 13:10:00.259793 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-75944c9b7-x4zn4" Dec 10 13:10:00 crc kubenswrapper[4921]: W1210 13:10:00.290021 4921 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod32c622c7_cf8b_4b25_836e_c13f5c35dde1.slice/crio-15f8eb59a78b7293cbd6edf79e0846a08a79cf519a33d66b58102989bc764fd2 WatchSource:0}: Error finding container 15f8eb59a78b7293cbd6edf79e0846a08a79cf519a33d66b58102989bc764fd2: Status 404 returned error can't find the container with id 15f8eb59a78b7293cbd6edf79e0846a08a79cf519a33d66b58102989bc764fd2 Dec 10 13:10:00 crc kubenswrapper[4921]: I1210 13:10:00.292955 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mkgmk\" (UniqueName: \"kubernetes.io/projected/597fc39b-b10d-4b12-af6e-ce35966daa75-kube-api-access-mkgmk\") pod \"openstack-operator-controller-manager-7bf4f7754c-bzqjz\" (UID: \"597fc39b-b10d-4b12-af6e-ce35966daa75\") " pod="openstack-operators/openstack-operator-controller-manager-7bf4f7754c-bzqjz" Dec 10 13:10:00 crc kubenswrapper[4921]: I1210 13:10:00.318080 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mfg85\" (UniqueName: \"kubernetes.io/projected/0c17ea5f-4baf-427a-a2ba-106bb7248194-kube-api-access-mfg85\") pod \"rabbitmq-cluster-operator-manager-668c99d594-h76ts\" (UID: \"0c17ea5f-4baf-427a-a2ba-106bb7248194\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-h76ts" Dec 10 13:10:00 crc kubenswrapper[4921]: I1210 13:10:00.318151 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/feac9e56-73ad-4870-8306-0789acfe8a8f-cert\") pod \"openstack-baremetal-operator-controller-manager-84b575879fkj985\" (UID: \"feac9e56-73ad-4870-8306-0789acfe8a8f\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879fkj985" Dec 10 13:10:00 crc kubenswrapper[4921]: E1210 13:10:00.318301 4921 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 10 13:10:00 crc kubenswrapper[4921]: E1210 13:10:00.318352 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/feac9e56-73ad-4870-8306-0789acfe8a8f-cert podName:feac9e56-73ad-4870-8306-0789acfe8a8f nodeName:}" failed. No retries permitted until 2025-12-10 13:10:01.318339556 +0000 UTC m=+798.534561480 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/feac9e56-73ad-4870-8306-0789acfe8a8f-cert") pod "openstack-baremetal-operator-controller-manager-84b575879fkj985" (UID: "feac9e56-73ad-4870-8306-0789acfe8a8f") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 10 13:10:00 crc kubenswrapper[4921]: I1210 13:10:00.339258 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mfg85\" (UniqueName: \"kubernetes.io/projected/0c17ea5f-4baf-427a-a2ba-106bb7248194-kube-api-access-mfg85\") pod \"rabbitmq-cluster-operator-manager-668c99d594-h76ts\" (UID: \"0c17ea5f-4baf-427a-a2ba-106bb7248194\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-h76ts" Dec 10 13:10:00 crc kubenswrapper[4921]: I1210 13:10:00.433099 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-h76ts" Dec 10 13:10:00 crc kubenswrapper[4921]: I1210 13:10:00.621655 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-wnpbm"] Dec 10 13:10:00 crc kubenswrapper[4921]: W1210 13:10:00.624200 4921 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4f0cbb29_bf13_4073_8c9d_28da25a1fbba.slice/crio-9e860174d7a4bd39d434f436cd4a8fdfcdb3be999a133f4a57aa6e400cd9eea9 WatchSource:0}: Error finding container 9e860174d7a4bd39d434f436cd4a8fdfcdb3be999a133f4a57aa6e400cd9eea9: Status 404 returned error can't find the container with id 9e860174d7a4bd39d434f436cd4a8fdfcdb3be999a133f4a57aa6e400cd9eea9 Dec 10 13:10:00 crc kubenswrapper[4921]: I1210 13:10:00.631165 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-jbx46"] Dec 10 13:10:00 crc kubenswrapper[4921]: W1210 13:10:00.633096 4921 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1ac6c721_b338_46f7_943e_63f5db2bd354.slice/crio-df8be5dc0d363cec902db2c92247e36fc79fe0792f0bd983a64506b6793533e9 WatchSource:0}: Error finding container df8be5dc0d363cec902db2c92247e36fc79fe0792f0bd983a64506b6793533e9: Status 404 returned error can't find the container with id df8be5dc0d363cec902db2c92247e36fc79fe0792f0bd983a64506b6793533e9 Dec 10 13:10:00 crc kubenswrapper[4921]: I1210 13:10:00.660138 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-967d97867-gxd4b"] Dec 10 13:10:00 crc kubenswrapper[4921]: I1210 13:10:00.726301 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/597fc39b-b10d-4b12-af6e-ce35966daa75-metrics-certs\") pod \"openstack-operator-controller-manager-7bf4f7754c-bzqjz\" (UID: \"597fc39b-b10d-4b12-af6e-ce35966daa75\") " pod="openstack-operators/openstack-operator-controller-manager-7bf4f7754c-bzqjz" Dec 10 13:10:00 crc kubenswrapper[4921]: I1210 13:10:00.726370 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/597fc39b-b10d-4b12-af6e-ce35966daa75-webhook-certs\") pod \"openstack-operator-controller-manager-7bf4f7754c-bzqjz\" (UID: \"597fc39b-b10d-4b12-af6e-ce35966daa75\") " pod="openstack-operators/openstack-operator-controller-manager-7bf4f7754c-bzqjz" Dec 10 13:10:00 
crc kubenswrapper[4921]: E1210 13:10:00.726500 4921 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 10 13:10:00 crc kubenswrapper[4921]: E1210 13:10:00.726547 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/597fc39b-b10d-4b12-af6e-ce35966daa75-webhook-certs podName:597fc39b-b10d-4b12-af6e-ce35966daa75 nodeName:}" failed. No retries permitted until 2025-12-10 13:10:01.726532893 +0000 UTC m=+798.942754817 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/597fc39b-b10d-4b12-af6e-ce35966daa75-webhook-certs") pod "openstack-operator-controller-manager-7bf4f7754c-bzqjz" (UID: "597fc39b-b10d-4b12-af6e-ce35966daa75") : secret "webhook-server-cert" not found Dec 10 13:10:00 crc kubenswrapper[4921]: E1210 13:10:00.726686 4921 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 10 13:10:00 crc kubenswrapper[4921]: E1210 13:10:00.726784 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/597fc39b-b10d-4b12-af6e-ce35966daa75-metrics-certs podName:597fc39b-b10d-4b12-af6e-ce35966daa75 nodeName:}" failed. No retries permitted until 2025-12-10 13:10:01.726757039 +0000 UTC m=+798.942978993 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/597fc39b-b10d-4b12-af6e-ce35966daa75-metrics-certs") pod "openstack-operator-controller-manager-7bf4f7754c-bzqjz" (UID: "597fc39b-b10d-4b12-af6e-ce35966daa75") : secret "metrics-server-cert" not found Dec 10 13:10:00 crc kubenswrapper[4921]: I1210 13:10:00.776614 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-967d97867-gxd4b" event={"ID":"5c6aaa2e-a82b-4d95-b7fc-bfa5eee026c8","Type":"ContainerStarted","Data":"1ecb08ad77c9f6b942b563cee566ee863679f899a9d1a45d58753469b17cce17"} Dec 10 13:10:00 crc kubenswrapper[4921]: I1210 13:10:00.780639 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-5697bb5779-xqfkp" event={"ID":"32c622c7-cf8b-4b25-836e-c13f5c35dde1","Type":"ContainerStarted","Data":"15f8eb59a78b7293cbd6edf79e0846a08a79cf519a33d66b58102989bc764fd2"} Dec 10 13:10:00 crc kubenswrapper[4921]: I1210 13:10:00.783266 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-wnpbm" event={"ID":"4f0cbb29-bf13-4073-8c9d-28da25a1fbba","Type":"ContainerStarted","Data":"9e860174d7a4bd39d434f436cd4a8fdfcdb3be999a133f4a57aa6e400cd9eea9"} Dec 10 13:10:00 crc kubenswrapper[4921]: I1210 13:10:00.785000 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-jbx46" event={"ID":"1ac6c721-b338-46f7-943e-63f5db2bd354","Type":"ContainerStarted","Data":"df8be5dc0d363cec902db2c92247e36fc79fe0792f0bd983a64506b6793533e9"} Dec 10 13:10:00 crc kubenswrapper[4921]: I1210 13:10:00.786240 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-6c677c69b-f8zhr" event={"ID":"488ba5bf-b48b-42f3-ba24-eba12c38a5cb","Type":"ContainerStarted","Data":"aba452be46452ed367e51f1a9507c7101067a1f8ce4ee4f0b92edf5c8bf8cd62"} Dec 10 13:10:00 crc kubenswrapper[4921]: I1210 13:10:00.825293 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-2jpm7"] Dec 10 13:10:00 crc kubenswrapper[4921]: I1210 13:10:00.827267 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/c4b5934f-4fde-47aa-a14e-fdb6f5fe7af1-cert\") pod \"infra-operator-controller-manager-78d48bff9d-7s9mj\" (UID: \"c4b5934f-4fde-47aa-a14e-fdb6f5fe7af1\") " pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-7s9mj" Dec 10 13:10:00 crc kubenswrapper[4921]: E1210 13:10:00.827504 4921 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 10 13:10:00 crc kubenswrapper[4921]: E1210 13:10:00.827551 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c4b5934f-4fde-47aa-a14e-fdb6f5fe7af1-cert podName:c4b5934f-4fde-47aa-a14e-fdb6f5fe7af1 nodeName:}" failed. No retries permitted until 2025-12-10 13:10:02.827538412 +0000 UTC m=+800.043760326 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/c4b5934f-4fde-47aa-a14e-fdb6f5fe7af1-cert") pod "infra-operator-controller-manager-78d48bff9d-7s9mj" (UID: "c4b5934f-4fde-47aa-a14e-fdb6f5fe7af1") : secret "infra-operator-webhook-server-cert" not found Dec 10 13:10:00 crc kubenswrapper[4921]: I1210 13:10:00.842898 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7765d96ddf-mf85l"] Dec 10 13:10:00 crc kubenswrapper[4921]: I1210 13:10:00.849986 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-pdm9q"] Dec 10 13:10:00 crc kubenswrapper[4921]: I1210 13:10:00.855466 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-xzrb8"] Dec 10 13:10:00 crc kubenswrapper[4921]: I1210 13:10:00.861947 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-697fb699cf-vfzb5"] Dec 10 13:10:00 crc kubenswrapper[4921]: W1210 13:10:00.863223 4921 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfc42a438_92ea_4f71_aeaa_62d388327002.slice/crio-385ad40347662369439ad3f617e8ab7d41ee226b5b4cada9f8875cada035ea5f WatchSource:0}: Error finding container 385ad40347662369439ad3f617e8ab7d41ee226b5b4cada9f8875cada035ea5f: Status 404 returned error can't find the container with id 385ad40347662369439ad3f617e8ab7d41ee226b5b4cada9f8875cada035ea5f Dec 10 13:10:00 crc kubenswrapper[4921]: I1210 13:10:00.887102 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-5b5fd79c9c-q5sxl"] Dec 10 13:10:00 crc kubenswrapper[4921]: W1210 13:10:00.891975 4921 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod30d38f66_817f_4412_8e03_9c55d0417ace.slice/crio-c86a6b29b6027a5d6be125375b8f19af2d434084ca5bbcf4b465f21d2736e95b WatchSource:0}: Error finding container c86a6b29b6027a5d6be125375b8f19af2d434084ca5bbcf4b465f21d2736e95b: Status 404 returned error can't find the container with id c86a6b29b6027a5d6be125375b8f19af2d434084ca5bbcf4b465f21d2736e95b Dec 10 13:10:01 crc kubenswrapper[4921]: I1210 13:10:01.009629 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-kgtwn"] Dec 10 13:10:01 crc kubenswrapper[4921]: I1210 13:10:01.029105 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-79c8c4686c-94t86"] Dec 10 13:10:01 crc kubenswrapper[4921]: I1210 13:10:01.043826 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-8zlrp"] Dec 10 13:10:01 crc kubenswrapper[4921]: I1210 13:10:01.058721 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-9d58d64bc-8879b"] Dec 10 13:10:01 crc kubenswrapper[4921]: I1210 13:10:01.073014 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-hbpss"] Dec 10 13:10:01 crc kubenswrapper[4921]: I1210 13:10:01.100767 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-jkgbt"] Dec 10 13:10:01 crc kubenswrapper[4921]: E1210 13:10:01.117705 4921 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/octavia-operator@sha256:d9a3694865a7d54ee96397add18c3898886e98d079aa20876a0f4de1fa7a7168,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-5nmbx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod octavia-operator-controller-manager-998648c74-8zlrp_openstack-operators(c261f893-dc59-43ba-8a28-09528971bfb1): 
ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 10 13:10:01 crc kubenswrapper[4921]: W1210 13:10:01.118011 4921 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod532d1487_1112_4341_a787_1981d7093054.slice/crio-5c68310b077d3e2a90a9278c89f4e1e73e5fc33dcf7f01810db065161128ed59 WatchSource:0}: Error finding container 5c68310b077d3e2a90a9278c89f4e1e73e5fc33dcf7f01810db065161128ed59: Status 404 returned error can't find the container with id 5c68310b077d3e2a90a9278c89f4e1e73e5fc33dcf7f01810db065161128ed59 Dec 10 13:10:01 crc kubenswrapper[4921]: E1210 13:10:01.118245 4921 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/swift-operator@sha256:3aa109bb973253ae9dcf339b9b65abbd1176cdb4be672c93e538a5f113816991,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-stsh4,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod swift-operator-controller-manager-9d58d64bc-8879b_openstack-operators(5c3dc67e-e2a4-426f-b365-d325af35b1b6): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 10 13:10:01 crc kubenswrapper[4921]: E1210 13:10:01.131502 4921 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true 
--v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-stsh4,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod swift-operator-controller-manager-9d58d64bc-8879b_openstack-operators(5c3dc67e-e2a4-426f-b365-d325af35b1b6): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 10 13:10:01 crc kubenswrapper[4921]: E1210 13:10:01.133221 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/swift-operator-controller-manager-9d58d64bc-8879b" podUID="5c3dc67e-e2a4-426f-b365-d325af35b1b6" Dec 10 13:10:01 crc kubenswrapper[4921]: E1210 13:10:01.133522 4921 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/telemetry-operator@sha256:f27e732ec1faee765461bf137d9be81278b2fa39675019a73622755e1e610b6f,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-tfq9t,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-58d5ff84df-p7nv4_openstack-operators(c1a2fb9e-5927-4ebf-a3f1-a13564f7c26e): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 10 13:10:01 crc kubenswrapper[4921]: E1210 13:10:01.135785 4921 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/test-operator@sha256:101b3e007d8c9f2e183262d7712f986ad51256448099069bc14f1ea5f997ab94,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-h9dk4,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-5854674fcc-jkgbt_openstack-operators(532d1487-1112-4341-a787-1981d7093054): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 10 13:10:01 crc 
kubenswrapper[4921]: E1210 13:10:01.135947 4921 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-tfq9t,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-58d5ff84df-p7nv4_openstack-operators(c1a2fb9e-5927-4ebf-a3f1-a13564f7c26e): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 10 13:10:01 crc kubenswrapper[4921]: E1210 13:10:01.136974 4921 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-5nmbx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod octavia-operator-controller-manager-998648c74-8zlrp_openstack-operators(c261f893-dc59-43ba-8a28-09528971bfb1): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 10 13:10:01 crc kubenswrapper[4921]: E1210 13:10:01.137022 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed 
to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/telemetry-operator-controller-manager-58d5ff84df-p7nv4" podUID="c1a2fb9e-5927-4ebf-a3f1-a13564f7c26e" Dec 10 13:10:01 crc kubenswrapper[4921]: E1210 13:10:01.138035 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/octavia-operator-controller-manager-998648c74-8zlrp" podUID="c261f893-dc59-43ba-8a28-09528971bfb1" Dec 10 13:10:01 crc kubenswrapper[4921]: E1210 13:10:01.140956 4921 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-h9dk4,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-5854674fcc-jkgbt_openstack-operators(532d1487-1112-4341-a787-1981d7093054): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 10 13:10:01 crc kubenswrapper[4921]: E1210 13:10:01.142250 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/test-operator-controller-manager-5854674fcc-jkgbt" podUID="532d1487-1112-4341-a787-1981d7093054" Dec 10 13:10:01 crc kubenswrapper[4921]: I1210 13:10:01.145475 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-58d5ff84df-p7nv4"] Dec 10 13:10:01 crc kubenswrapper[4921]: I1210 13:10:01.234150 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-h76ts"] Dec 10 13:10:01 crc kubenswrapper[4921]: I1210 13:10:01.270635 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-75944c9b7-x4zn4"] Dec 10 13:10:01 crc kubenswrapper[4921]: E1210 13:10:01.316014 4921 
kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/watcher-operator@sha256:961417d59f527d925ac48ff6a11de747d0493315e496e34dc83d76a1a1fff58a,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-sfztm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-75944c9b7-x4zn4_openstack-operators(d838a4f0-a117-4807-aca0-8bc00b6ad6f1): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 10 13:10:01 crc kubenswrapper[4921]: E1210 13:10:01.323886 4921 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-sfztm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-75944c9b7-x4zn4_openstack-operators(d838a4f0-a117-4807-aca0-8bc00b6ad6f1): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 10 13:10:01 crc kubenswrapper[4921]: E1210 13:10:01.334701 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/watcher-operator-controller-manager-75944c9b7-x4zn4" podUID="d838a4f0-a117-4807-aca0-8bc00b6ad6f1" Dec 10 13:10:01 crc kubenswrapper[4921]: I1210 13:10:01.335028 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/feac9e56-73ad-4870-8306-0789acfe8a8f-cert\") pod \"openstack-baremetal-operator-controller-manager-84b575879fkj985\" (UID: \"feac9e56-73ad-4870-8306-0789acfe8a8f\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879fkj985" Dec 10 13:10:01 crc kubenswrapper[4921]: E1210 13:10:01.335189 4921 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 10 13:10:01 crc kubenswrapper[4921]: E1210 13:10:01.335234 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/feac9e56-73ad-4870-8306-0789acfe8a8f-cert podName:feac9e56-73ad-4870-8306-0789acfe8a8f nodeName:}" failed. No retries permitted until 2025-12-10 13:10:03.335219649 +0000 UTC m=+800.551441573 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/feac9e56-73ad-4870-8306-0789acfe8a8f-cert") pod "openstack-baremetal-operator-controller-manager-84b575879fkj985" (UID: "feac9e56-73ad-4870-8306-0789acfe8a8f") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 10 13:10:01 crc kubenswrapper[4921]: I1210 13:10:01.740415 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/597fc39b-b10d-4b12-af6e-ce35966daa75-metrics-certs\") pod \"openstack-operator-controller-manager-7bf4f7754c-bzqjz\" (UID: \"597fc39b-b10d-4b12-af6e-ce35966daa75\") " pod="openstack-operators/openstack-operator-controller-manager-7bf4f7754c-bzqjz" Dec 10 13:10:01 crc kubenswrapper[4921]: I1210 13:10:01.740493 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/597fc39b-b10d-4b12-af6e-ce35966daa75-webhook-certs\") pod \"openstack-operator-controller-manager-7bf4f7754c-bzqjz\" (UID: \"597fc39b-b10d-4b12-af6e-ce35966daa75\") " pod="openstack-operators/openstack-operator-controller-manager-7bf4f7754c-bzqjz" Dec 10 13:10:01 crc kubenswrapper[4921]: E1210 13:10:01.740655 4921 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 10 13:10:01 crc kubenswrapper[4921]: E1210 13:10:01.740700 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/597fc39b-b10d-4b12-af6e-ce35966daa75-webhook-certs podName:597fc39b-b10d-4b12-af6e-ce35966daa75 nodeName:}" failed. No retries permitted until 2025-12-10 13:10:03.740687404 +0000 UTC m=+800.956909318 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/597fc39b-b10d-4b12-af6e-ce35966daa75-webhook-certs") pod "openstack-operator-controller-manager-7bf4f7754c-bzqjz" (UID: "597fc39b-b10d-4b12-af6e-ce35966daa75") : secret "webhook-server-cert" not found Dec 10 13:10:01 crc kubenswrapper[4921]: E1210 13:10:01.740899 4921 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 10 13:10:01 crc kubenswrapper[4921]: E1210 13:10:01.740963 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/597fc39b-b10d-4b12-af6e-ce35966daa75-metrics-certs podName:597fc39b-b10d-4b12-af6e-ce35966daa75 nodeName:}" failed. No retries permitted until 2025-12-10 13:10:03.740947191 +0000 UTC m=+800.957169115 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/597fc39b-b10d-4b12-af6e-ce35966daa75-metrics-certs") pod "openstack-operator-controller-manager-7bf4f7754c-bzqjz" (UID: "597fc39b-b10d-4b12-af6e-ce35966daa75") : secret "metrics-server-cert" not found Dec 10 13:10:01 crc kubenswrapper[4921]: I1210 13:10:01.797399 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-8zlrp" event={"ID":"c261f893-dc59-43ba-8a28-09528971bfb1","Type":"ContainerStarted","Data":"5da2a6e0f621c7084234520600d3df2f044742d844efa34188fa4e8c4d832376"} Dec 10 13:10:01 crc kubenswrapper[4921]: I1210 13:10:01.800323 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-jkgbt" event={"ID":"532d1487-1112-4341-a787-1981d7093054","Type":"ContainerStarted","Data":"5c68310b077d3e2a90a9278c89f4e1e73e5fc33dcf7f01810db065161128ed59"} Dec 10 13:10:01 crc kubenswrapper[4921]: E1210 13:10:01.802362 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/octavia-operator@sha256:d9a3694865a7d54ee96397add18c3898886e98d079aa20876a0f4de1fa7a7168\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/octavia-operator-controller-manager-998648c74-8zlrp" podUID="c261f893-dc59-43ba-8a28-09528971bfb1" Dec 10 13:10:01 crc kubenswrapper[4921]: I1210 13:10:01.802841 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-hbpss" event={"ID":"cdc476bb-a407-403e-9bbe-e2f62e0ce23b","Type":"ContainerStarted","Data":"641709f4fb8a483d44e7e7b438058ba80aee7790c7f0e8328d5c87982fa4750c"} Dec 10 13:10:01 crc kubenswrapper[4921]: E1210 13:10:01.803111 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:101b3e007d8c9f2e183262d7712f986ad51256448099069bc14f1ea5f997ab94\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/test-operator-controller-manager-5854674fcc-jkgbt" podUID="532d1487-1112-4341-a787-1981d7093054" Dec 10 13:10:01 crc kubenswrapper[4921]: I1210 13:10:01.803939 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-mf85l" event={"ID":"83057698-5071-4487-9ed1-3649fe298d00","Type":"ContainerStarted","Data":"e98cc16ef028decd30fd034f97680dee768a3fbf8f3632f4d607ba54dc9ba585"} Dec 10 13:10:01 crc kubenswrapper[4921]: I1210 13:10:01.806318 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-2jpm7" event={"ID":"dde58658-03d9-43dc-8fe5-4be3a607934b","Type":"ContainerStarted","Data":"b2715fceeb94e822977c2a6144d1a530c17fcd66f7b48fd6e9204ce57f8ea261"} Dec 10 13:10:01 crc kubenswrapper[4921]: I1210 13:10:01.807636 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-9d58d64bc-8879b" 
event={"ID":"5c3dc67e-e2a4-426f-b365-d325af35b1b6","Type":"ContainerStarted","Data":"16535540ee3f4b6b610c8517715998845200608e7fe0b7d4491625cf38218488"} Dec 10 13:10:01 crc kubenswrapper[4921]: E1210 13:10:01.812112 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:3aa109bb973253ae9dcf339b9b65abbd1176cdb4be672c93e538a5f113816991\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/swift-operator-controller-manager-9d58d64bc-8879b" podUID="5c3dc67e-e2a4-426f-b365-d325af35b1b6" Dec 10 13:10:01 crc kubenswrapper[4921]: I1210 13:10:01.815490 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-5b5fd79c9c-q5sxl" event={"ID":"30d38f66-817f-4412-8e03-9c55d0417ace","Type":"ContainerStarted","Data":"c86a6b29b6027a5d6be125375b8f19af2d434084ca5bbcf4b465f21d2736e95b"} Dec 10 13:10:01 crc kubenswrapper[4921]: I1210 13:10:01.820972 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-xzrb8" event={"ID":"22100473-2036-47d3-846e-b9e351b7d7e1","Type":"ContainerStarted","Data":"7d3183754a0ea9f7c917e6ad4e803b112d03e85565665917f31581106ac48e3d"} Dec 10 13:10:01 crc kubenswrapper[4921]: I1210 13:10:01.822978 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-pdm9q" event={"ID":"fc42a438-92ea-4f71-aeaa-62d388327002","Type":"ContainerStarted","Data":"385ad40347662369439ad3f617e8ab7d41ee226b5b4cada9f8875cada035ea5f"} Dec 10 13:10:01 crc kubenswrapper[4921]: I1210 13:10:01.824859 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-kgtwn" event={"ID":"e452d8aa-f7b0-4bbe-9ee2-0f54854b0bad","Type":"ContainerStarted","Data":"9294735de716a8e7641983dfea91cc045cc98a79108e643d4ce602c38dc190dc"} Dec 10 13:10:01 crc kubenswrapper[4921]: I1210 13:10:01.826281 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-75944c9b7-x4zn4" event={"ID":"d838a4f0-a117-4807-aca0-8bc00b6ad6f1","Type":"ContainerStarted","Data":"df71bccd33b45ac4048c7d446cf655a0c1eecffdcace1c56bdd49147c46b1308"} Dec 10 13:10:01 crc kubenswrapper[4921]: I1210 13:10:01.828676 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-h76ts" event={"ID":"0c17ea5f-4baf-427a-a2ba-106bb7248194","Type":"ContainerStarted","Data":"297637330d2ced968d52bd879d0f3c513c47d729731f7cd2fb04624770f3da24"} Dec 10 13:10:01 crc kubenswrapper[4921]: E1210 13:10:01.836251 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:961417d59f527d925ac48ff6a11de747d0493315e496e34dc83d76a1a1fff58a\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/watcher-operator-controller-manager-75944c9b7-x4zn4" podUID="d838a4f0-a117-4807-aca0-8bc00b6ad6f1" Dec 10 13:10:01 crc 
kubenswrapper[4921]: I1210 13:10:01.838746 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-697fb699cf-vfzb5" event={"ID":"b297d26d-5199-4ebf-b8ad-5ca6f5e53e86","Type":"ContainerStarted","Data":"f919788e7bd08298cf70626525b035b97752b364ea18a30656d3e6ec7c87c793"} Dec 10 13:10:01 crc kubenswrapper[4921]: I1210 13:10:01.842535 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-58d5ff84df-p7nv4" event={"ID":"c1a2fb9e-5927-4ebf-a3f1-a13564f7c26e","Type":"ContainerStarted","Data":"e0304775a4e47ef5b8ce6b23831646a2110eceef803105fe39296f564ddb30a1"} Dec 10 13:10:01 crc kubenswrapper[4921]: E1210 13:10:01.844137 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:f27e732ec1faee765461bf137d9be81278b2fa39675019a73622755e1e610b6f\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/telemetry-operator-controller-manager-58d5ff84df-p7nv4" podUID="c1a2fb9e-5927-4ebf-a3f1-a13564f7c26e" Dec 10 13:10:01 crc kubenswrapper[4921]: I1210 13:10:01.844894 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-79c8c4686c-94t86" event={"ID":"a0e4b9fa-671d-45a9-93e9-078eb23843db","Type":"ContainerStarted","Data":"f8ce25864f82e316ddc920c89ebdd957ac47c658519c7ba9f0a5013d7f888c6a"} Dec 10 13:10:02 crc kubenswrapper[4921]: I1210 13:10:02.861124 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/c4b5934f-4fde-47aa-a14e-fdb6f5fe7af1-cert\") pod \"infra-operator-controller-manager-78d48bff9d-7s9mj\" (UID: \"c4b5934f-4fde-47aa-a14e-fdb6f5fe7af1\") " pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-7s9mj" Dec 10 13:10:02 crc kubenswrapper[4921]: E1210 13:10:02.861243 4921 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 10 13:10:02 crc kubenswrapper[4921]: E1210 13:10:02.861541 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c4b5934f-4fde-47aa-a14e-fdb6f5fe7af1-cert podName:c4b5934f-4fde-47aa-a14e-fdb6f5fe7af1 nodeName:}" failed. No retries permitted until 2025-12-10 13:10:06.861523296 +0000 UTC m=+804.077745220 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/c4b5934f-4fde-47aa-a14e-fdb6f5fe7af1-cert") pod "infra-operator-controller-manager-78d48bff9d-7s9mj" (UID: "c4b5934f-4fde-47aa-a14e-fdb6f5fe7af1") : secret "infra-operator-webhook-server-cert" not found Dec 10 13:10:02 crc kubenswrapper[4921]: E1210 13:10:02.875188 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/octavia-operator@sha256:d9a3694865a7d54ee96397add18c3898886e98d079aa20876a0f4de1fa7a7168\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/octavia-operator-controller-manager-998648c74-8zlrp" podUID="c261f893-dc59-43ba-8a28-09528971bfb1" Dec 10 13:10:02 crc kubenswrapper[4921]: E1210 13:10:02.875873 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:f27e732ec1faee765461bf137d9be81278b2fa39675019a73622755e1e610b6f\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/telemetry-operator-controller-manager-58d5ff84df-p7nv4" podUID="c1a2fb9e-5927-4ebf-a3f1-a13564f7c26e" Dec 10 13:10:02 crc kubenswrapper[4921]: E1210 13:10:02.875948 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:3aa109bb973253ae9dcf339b9b65abbd1176cdb4be672c93e538a5f113816991\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/swift-operator-controller-manager-9d58d64bc-8879b" podUID="5c3dc67e-e2a4-426f-b365-d325af35b1b6" Dec 10 13:10:02 crc kubenswrapper[4921]: E1210 13:10:02.876018 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:961417d59f527d925ac48ff6a11de747d0493315e496e34dc83d76a1a1fff58a\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/watcher-operator-controller-manager-75944c9b7-x4zn4" podUID="d838a4f0-a117-4807-aca0-8bc00b6ad6f1" Dec 10 13:10:02 crc kubenswrapper[4921]: E1210 13:10:02.876065 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:101b3e007d8c9f2e183262d7712f986ad51256448099069bc14f1ea5f997ab94\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/test-operator-controller-manager-5854674fcc-jkgbt" podUID="532d1487-1112-4341-a787-1981d7093054" Dec 10 13:10:03 crc kubenswrapper[4921]: I1210 
13:10:03.377428 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/feac9e56-73ad-4870-8306-0789acfe8a8f-cert\") pod \"openstack-baremetal-operator-controller-manager-84b575879fkj985\" (UID: \"feac9e56-73ad-4870-8306-0789acfe8a8f\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879fkj985" Dec 10 13:10:03 crc kubenswrapper[4921]: E1210 13:10:03.378503 4921 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 10 13:10:03 crc kubenswrapper[4921]: E1210 13:10:03.378548 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/feac9e56-73ad-4870-8306-0789acfe8a8f-cert podName:feac9e56-73ad-4870-8306-0789acfe8a8f nodeName:}" failed. No retries permitted until 2025-12-10 13:10:07.378534943 +0000 UTC m=+804.594756867 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/feac9e56-73ad-4870-8306-0789acfe8a8f-cert") pod "openstack-baremetal-operator-controller-manager-84b575879fkj985" (UID: "feac9e56-73ad-4870-8306-0789acfe8a8f") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 10 13:10:03 crc kubenswrapper[4921]: I1210 13:10:03.791497 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/597fc39b-b10d-4b12-af6e-ce35966daa75-metrics-certs\") pod \"openstack-operator-controller-manager-7bf4f7754c-bzqjz\" (UID: \"597fc39b-b10d-4b12-af6e-ce35966daa75\") " pod="openstack-operators/openstack-operator-controller-manager-7bf4f7754c-bzqjz" Dec 10 13:10:03 crc kubenswrapper[4921]: I1210 13:10:03.791561 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/597fc39b-b10d-4b12-af6e-ce35966daa75-webhook-certs\") pod \"openstack-operator-controller-manager-7bf4f7754c-bzqjz\" (UID: \"597fc39b-b10d-4b12-af6e-ce35966daa75\") " pod="openstack-operators/openstack-operator-controller-manager-7bf4f7754c-bzqjz" Dec 10 13:10:03 crc kubenswrapper[4921]: E1210 13:10:03.791682 4921 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 10 13:10:03 crc kubenswrapper[4921]: E1210 13:10:03.791684 4921 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 10 13:10:03 crc kubenswrapper[4921]: E1210 13:10:03.791723 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/597fc39b-b10d-4b12-af6e-ce35966daa75-webhook-certs podName:597fc39b-b10d-4b12-af6e-ce35966daa75 nodeName:}" failed. No retries permitted until 2025-12-10 13:10:07.791708964 +0000 UTC m=+805.007930878 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/597fc39b-b10d-4b12-af6e-ce35966daa75-webhook-certs") pod "openstack-operator-controller-manager-7bf4f7754c-bzqjz" (UID: "597fc39b-b10d-4b12-af6e-ce35966daa75") : secret "webhook-server-cert" not found Dec 10 13:10:03 crc kubenswrapper[4921]: E1210 13:10:03.791763 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/597fc39b-b10d-4b12-af6e-ce35966daa75-metrics-certs podName:597fc39b-b10d-4b12-af6e-ce35966daa75 nodeName:}" failed. 
No retries permitted until 2025-12-10 13:10:07.791732095 +0000 UTC m=+805.007954019 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/597fc39b-b10d-4b12-af6e-ce35966daa75-metrics-certs") pod "openstack-operator-controller-manager-7bf4f7754c-bzqjz" (UID: "597fc39b-b10d-4b12-af6e-ce35966daa75") : secret "metrics-server-cert" not found Dec 10 13:10:06 crc kubenswrapper[4921]: I1210 13:10:06.939167 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/c4b5934f-4fde-47aa-a14e-fdb6f5fe7af1-cert\") pod \"infra-operator-controller-manager-78d48bff9d-7s9mj\" (UID: \"c4b5934f-4fde-47aa-a14e-fdb6f5fe7af1\") " pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-7s9mj" Dec 10 13:10:06 crc kubenswrapper[4921]: E1210 13:10:06.939404 4921 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 10 13:10:06 crc kubenswrapper[4921]: E1210 13:10:06.939649 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c4b5934f-4fde-47aa-a14e-fdb6f5fe7af1-cert podName:c4b5934f-4fde-47aa-a14e-fdb6f5fe7af1 nodeName:}" failed. No retries permitted until 2025-12-10 13:10:14.939630444 +0000 UTC m=+812.155852368 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/c4b5934f-4fde-47aa-a14e-fdb6f5fe7af1-cert") pod "infra-operator-controller-manager-78d48bff9d-7s9mj" (UID: "c4b5934f-4fde-47aa-a14e-fdb6f5fe7af1") : secret "infra-operator-webhook-server-cert" not found Dec 10 13:10:07 crc kubenswrapper[4921]: I1210 13:10:07.445170 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/feac9e56-73ad-4870-8306-0789acfe8a8f-cert\") pod \"openstack-baremetal-operator-controller-manager-84b575879fkj985\" (UID: \"feac9e56-73ad-4870-8306-0789acfe8a8f\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879fkj985" Dec 10 13:10:07 crc kubenswrapper[4921]: E1210 13:10:07.445313 4921 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 10 13:10:07 crc kubenswrapper[4921]: E1210 13:10:07.445369 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/feac9e56-73ad-4870-8306-0789acfe8a8f-cert podName:feac9e56-73ad-4870-8306-0789acfe8a8f nodeName:}" failed. No retries permitted until 2025-12-10 13:10:15.445353818 +0000 UTC m=+812.661575742 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/feac9e56-73ad-4870-8306-0789acfe8a8f-cert") pod "openstack-baremetal-operator-controller-manager-84b575879fkj985" (UID: "feac9e56-73ad-4870-8306-0789acfe8a8f") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 10 13:10:07 crc kubenswrapper[4921]: I1210 13:10:07.849943 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/597fc39b-b10d-4b12-af6e-ce35966daa75-metrics-certs\") pod \"openstack-operator-controller-manager-7bf4f7754c-bzqjz\" (UID: \"597fc39b-b10d-4b12-af6e-ce35966daa75\") " pod="openstack-operators/openstack-operator-controller-manager-7bf4f7754c-bzqjz" Dec 10 13:10:07 crc kubenswrapper[4921]: I1210 13:10:07.850017 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/597fc39b-b10d-4b12-af6e-ce35966daa75-webhook-certs\") pod \"openstack-operator-controller-manager-7bf4f7754c-bzqjz\" (UID: \"597fc39b-b10d-4b12-af6e-ce35966daa75\") " pod="openstack-operators/openstack-operator-controller-manager-7bf4f7754c-bzqjz" Dec 10 13:10:07 crc kubenswrapper[4921]: E1210 13:10:07.850141 4921 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 10 13:10:07 crc kubenswrapper[4921]: E1210 13:10:07.850194 4921 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 10 13:10:07 crc kubenswrapper[4921]: E1210 13:10:07.850209 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/597fc39b-b10d-4b12-af6e-ce35966daa75-metrics-certs podName:597fc39b-b10d-4b12-af6e-ce35966daa75 nodeName:}" failed. No retries permitted until 2025-12-10 13:10:15.850193035 +0000 UTC m=+813.066414959 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/597fc39b-b10d-4b12-af6e-ce35966daa75-metrics-certs") pod "openstack-operator-controller-manager-7bf4f7754c-bzqjz" (UID: "597fc39b-b10d-4b12-af6e-ce35966daa75") : secret "metrics-server-cert" not found Dec 10 13:10:07 crc kubenswrapper[4921]: E1210 13:10:07.850241 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/597fc39b-b10d-4b12-af6e-ce35966daa75-webhook-certs podName:597fc39b-b10d-4b12-af6e-ce35966daa75 nodeName:}" failed. No retries permitted until 2025-12-10 13:10:15.850226676 +0000 UTC m=+813.066448600 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/597fc39b-b10d-4b12-af6e-ce35966daa75-webhook-certs") pod "openstack-operator-controller-manager-7bf4f7754c-bzqjz" (UID: "597fc39b-b10d-4b12-af6e-ce35966daa75") : secret "webhook-server-cert" not found Dec 10 13:10:14 crc kubenswrapper[4921]: I1210 13:10:14.950005 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/c4b5934f-4fde-47aa-a14e-fdb6f5fe7af1-cert\") pod \"infra-operator-controller-manager-78d48bff9d-7s9mj\" (UID: \"c4b5934f-4fde-47aa-a14e-fdb6f5fe7af1\") " pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-7s9mj" Dec 10 13:10:14 crc kubenswrapper[4921]: I1210 13:10:14.957165 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/c4b5934f-4fde-47aa-a14e-fdb6f5fe7af1-cert\") pod \"infra-operator-controller-manager-78d48bff9d-7s9mj\" (UID: \"c4b5934f-4fde-47aa-a14e-fdb6f5fe7af1\") " pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-7s9mj" Dec 10 13:10:14 crc kubenswrapper[4921]: I1210 13:10:14.989370 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-7s9mj" Dec 10 13:10:15 crc kubenswrapper[4921]: I1210 13:10:15.456316 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/feac9e56-73ad-4870-8306-0789acfe8a8f-cert\") pod \"openstack-baremetal-operator-controller-manager-84b575879fkj985\" (UID: \"feac9e56-73ad-4870-8306-0789acfe8a8f\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879fkj985" Dec 10 13:10:15 crc kubenswrapper[4921]: I1210 13:10:15.461035 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/feac9e56-73ad-4870-8306-0789acfe8a8f-cert\") pod \"openstack-baremetal-operator-controller-manager-84b575879fkj985\" (UID: \"feac9e56-73ad-4870-8306-0789acfe8a8f\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879fkj985" Dec 10 13:10:15 crc kubenswrapper[4921]: I1210 13:10:15.503896 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879fkj985" Dec 10 13:10:15 crc kubenswrapper[4921]: I1210 13:10:15.861575 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/597fc39b-b10d-4b12-af6e-ce35966daa75-metrics-certs\") pod \"openstack-operator-controller-manager-7bf4f7754c-bzqjz\" (UID: \"597fc39b-b10d-4b12-af6e-ce35966daa75\") " pod="openstack-operators/openstack-operator-controller-manager-7bf4f7754c-bzqjz" Dec 10 13:10:15 crc kubenswrapper[4921]: I1210 13:10:15.861642 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/597fc39b-b10d-4b12-af6e-ce35966daa75-webhook-certs\") pod \"openstack-operator-controller-manager-7bf4f7754c-bzqjz\" (UID: \"597fc39b-b10d-4b12-af6e-ce35966daa75\") " pod="openstack-operators/openstack-operator-controller-manager-7bf4f7754c-bzqjz" Dec 10 13:10:15 crc kubenswrapper[4921]: E1210 13:10:15.861777 4921 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 10 13:10:15 crc kubenswrapper[4921]: E1210 13:10:15.861837 4921 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/597fc39b-b10d-4b12-af6e-ce35966daa75-webhook-certs podName:597fc39b-b10d-4b12-af6e-ce35966daa75 nodeName:}" failed. No retries permitted until 2025-12-10 13:10:31.861820603 +0000 UTC m=+829.078042517 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/597fc39b-b10d-4b12-af6e-ce35966daa75-webhook-certs") pod "openstack-operator-controller-manager-7bf4f7754c-bzqjz" (UID: "597fc39b-b10d-4b12-af6e-ce35966daa75") : secret "webhook-server-cert" not found Dec 10 13:10:15 crc kubenswrapper[4921]: I1210 13:10:15.865931 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/597fc39b-b10d-4b12-af6e-ce35966daa75-metrics-certs\") pod \"openstack-operator-controller-manager-7bf4f7754c-bzqjz\" (UID: \"597fc39b-b10d-4b12-af6e-ce35966daa75\") " pod="openstack-operators/openstack-operator-controller-manager-7bf4f7754c-bzqjz" Dec 10 13:10:16 crc kubenswrapper[4921]: E1210 13:10:16.564636 4921 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/barbican-operator@sha256:f6059a0fbf031d34dcf086d14ce8c0546caeaee23c5780e90b5037c5feee9fea" Dec 10 13:10:16 crc kubenswrapper[4921]: E1210 13:10:16.565369 4921 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/barbican-operator@sha256:f6059a0fbf031d34dcf086d14ce8c0546caeaee23c5780e90b5037c5feee9fea,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-2ln6b,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod barbican-operator-controller-manager-7d9dfd778-2jpm7_openstack-operators(dde58658-03d9-43dc-8fe5-4be3a607934b): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 10 13:10:17 crc kubenswrapper[4921]: E1210 13:10:17.583644 4921 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/ovn-operator@sha256:635a4aef9d6f0b799e8ec91333dbb312160c001d05b3c63f614c124e0b67cb59" Dec 10 13:10:17 crc kubenswrapper[4921]: E1210 13:10:17.584507 4921 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/ovn-operator@sha256:635a4aef9d6f0b799e8ec91333dbb312160c001d05b3c63f614c124e0b67cb59,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-28p47,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovn-operator-controller-manager-b6456fdb6-pdm9q_openstack-operators(fc42a438-92ea-4f71-aeaa-62d388327002): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 10 13:10:18 crc kubenswrapper[4921]: E1210 13:10:18.427530 4921 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/manila-operator@sha256:44126f9c6b1d2bf752ddf989e20a4fc4cc1c07723d4fcb78465ccb2f55da6b3a" Dec 10 13:10:18 crc kubenswrapper[4921]: E1210 13:10:18.428026 4921 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/manila-operator@sha256:44126f9c6b1d2bf752ddf989e20a4fc4cc1c07723d4fcb78465ccb2f55da6b3a,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-ksmf6,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod manila-operator-controller-manager-5b5fd79c9c-q5sxl_openstack-operators(30d38f66-817f-4412-8e03-9c55d0417ace): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 10 13:10:22 crc kubenswrapper[4921]: I1210 13:10:22.351196 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-l2jrj"] Dec 10 13:10:22 crc kubenswrapper[4921]: I1210 13:10:22.353316 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-l2jrj" Dec 10 13:10:22 crc kubenswrapper[4921]: I1210 13:10:22.371645 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-l2jrj"] Dec 10 13:10:22 crc kubenswrapper[4921]: I1210 13:10:22.390541 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e4eeb986-72ad-4c59-a947-b34fd3dd101b-utilities\") pod \"redhat-operators-l2jrj\" (UID: \"e4eeb986-72ad-4c59-a947-b34fd3dd101b\") " pod="openshift-marketplace/redhat-operators-l2jrj" Dec 10 13:10:22 crc kubenswrapper[4921]: I1210 13:10:22.390948 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x9jqj\" (UniqueName: \"kubernetes.io/projected/e4eeb986-72ad-4c59-a947-b34fd3dd101b-kube-api-access-x9jqj\") pod \"redhat-operators-l2jrj\" (UID: \"e4eeb986-72ad-4c59-a947-b34fd3dd101b\") " pod="openshift-marketplace/redhat-operators-l2jrj" Dec 10 13:10:22 crc kubenswrapper[4921]: I1210 13:10:22.390995 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e4eeb986-72ad-4c59-a947-b34fd3dd101b-catalog-content\") pod \"redhat-operators-l2jrj\" (UID: \"e4eeb986-72ad-4c59-a947-b34fd3dd101b\") " pod="openshift-marketplace/redhat-operators-l2jrj" Dec 10 13:10:22 crc kubenswrapper[4921]: I1210 13:10:22.491798 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e4eeb986-72ad-4c59-a947-b34fd3dd101b-utilities\") pod \"redhat-operators-l2jrj\" (UID: \"e4eeb986-72ad-4c59-a947-b34fd3dd101b\") " pod="openshift-marketplace/redhat-operators-l2jrj" Dec 10 13:10:22 crc kubenswrapper[4921]: I1210 13:10:22.491865 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x9jqj\" (UniqueName: \"kubernetes.io/projected/e4eeb986-72ad-4c59-a947-b34fd3dd101b-kube-api-access-x9jqj\") pod \"redhat-operators-l2jrj\" (UID: \"e4eeb986-72ad-4c59-a947-b34fd3dd101b\") " 
pod="openshift-marketplace/redhat-operators-l2jrj" Dec 10 13:10:22 crc kubenswrapper[4921]: I1210 13:10:22.491896 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e4eeb986-72ad-4c59-a947-b34fd3dd101b-catalog-content\") pod \"redhat-operators-l2jrj\" (UID: \"e4eeb986-72ad-4c59-a947-b34fd3dd101b\") " pod="openshift-marketplace/redhat-operators-l2jrj" Dec 10 13:10:22 crc kubenswrapper[4921]: I1210 13:10:22.492353 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e4eeb986-72ad-4c59-a947-b34fd3dd101b-utilities\") pod \"redhat-operators-l2jrj\" (UID: \"e4eeb986-72ad-4c59-a947-b34fd3dd101b\") " pod="openshift-marketplace/redhat-operators-l2jrj" Dec 10 13:10:22 crc kubenswrapper[4921]: I1210 13:10:22.492446 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e4eeb986-72ad-4c59-a947-b34fd3dd101b-catalog-content\") pod \"redhat-operators-l2jrj\" (UID: \"e4eeb986-72ad-4c59-a947-b34fd3dd101b\") " pod="openshift-marketplace/redhat-operators-l2jrj" Dec 10 13:10:22 crc kubenswrapper[4921]: I1210 13:10:22.520899 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x9jqj\" (UniqueName: \"kubernetes.io/projected/e4eeb986-72ad-4c59-a947-b34fd3dd101b-kube-api-access-x9jqj\") pod \"redhat-operators-l2jrj\" (UID: \"e4eeb986-72ad-4c59-a947-b34fd3dd101b\") " pod="openshift-marketplace/redhat-operators-l2jrj" Dec 10 13:10:22 crc kubenswrapper[4921]: E1210 13:10:22.629636 4921 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/heat-operator@sha256:c4abfc148600dfa85915f3dc911d988ea2335f26cb6b8d749fe79bfe53e5e429" Dec 10 13:10:22 crc kubenswrapper[4921]: E1210 13:10:22.629925 4921 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/heat-operator@sha256:c4abfc148600dfa85915f3dc911d988ea2335f26cb6b8d749fe79bfe53e5e429,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-84ch7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod heat-operator-controller-manager-5f64f6f8bb-jbx46_openstack-operators(1ac6c721-b338-46f7-943e-63f5db2bd354): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 10 13:10:22 crc kubenswrapper[4921]: I1210 13:10:22.673613 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-l2jrj" Dec 10 13:10:26 crc kubenswrapper[4921]: E1210 13:10:26.121859 4921 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/horizon-operator@sha256:9e847f4dbdea19ab997f32a02b3680a9bd966f9c705911645c3866a19fda9ea5" Dec 10 13:10:26 crc kubenswrapper[4921]: E1210 13:10:26.123148 4921 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/horizon-operator@sha256:9e847f4dbdea19ab997f32a02b3680a9bd966f9c705911645c3866a19fda9ea5,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-bpwlh,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-operator-controller-manager-68c6d99b8f-wnpbm_openstack-operators(4f0cbb29-bf13-4073-8c9d-28da25a1fbba): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 10 13:10:26 crc kubenswrapper[4921]: E1210 13:10:26.623821 4921 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/mariadb-operator@sha256:424da951f13f1fbe9083215dc9f5088f90676dd813f01fdf3c1a8639b61cbaad" Dec 10 13:10:26 crc kubenswrapper[4921]: E1210 13:10:26.624134 4921 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/mariadb-operator@sha256:424da951f13f1fbe9083215dc9f5088f90676dd813f01fdf3c1a8639b61cbaad,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-zq5l5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod mariadb-operator-controller-manager-79c8c4686c-94t86_openstack-operators(a0e4b9fa-671d-45a9-93e9-078eb23843db): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 10 13:10:27 crc kubenswrapper[4921]: E1210 13:10:27.954207 4921 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/designate-operator@sha256:900050d3501c0785b227db34b89883efe68247816e5c7427cacb74f8aa10605a" Dec 10 13:10:27 crc kubenswrapper[4921]: E1210 13:10:27.954946 4921 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/designate-operator@sha256:900050d3501c0785b227db34b89883efe68247816e5c7427cacb74f8aa10605a,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-85r2l,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod designate-operator-controller-manager-697fb699cf-vfzb5_openstack-operators(b297d26d-5199-4ebf-b8ad-5ca6f5e53e86): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 10 13:10:28 crc kubenswrapper[4921]: E1210 13:10:28.602309 4921 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/ironic-operator@sha256:5bdb3685be3ddc1efd62e16aaf2fa96ead64315e26d52b1b2a7d8ac01baa1e87" Dec 10 13:10:28 crc kubenswrapper[4921]: E1210 13:10:28.602816 4921 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/ironic-operator@sha256:5bdb3685be3ddc1efd62e16aaf2fa96ead64315e26d52b1b2a7d8ac01baa1e87,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-vjk95,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ironic-operator-controller-manager-967d97867-gxd4b_openstack-operators(5c6aaa2e-a82b-4d95-b7fc-bfa5eee026c8): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 10 13:10:29 crc kubenswrapper[4921]: E1210 13:10:29.106906 4921 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/nova-operator@sha256:779f0cee6024d0fb8f259b036fe790e62aa5a3b0431ea9bf15a6e7d02e2e5670" Dec 10 13:10:29 crc kubenswrapper[4921]: E1210 13:10:29.107156 4921 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/nova-operator@sha256:779f0cee6024d0fb8f259b036fe790e62aa5a3b0431ea9bf15a6e7d02e2e5670,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-r47wq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod nova-operator-controller-manager-697bc559fc-kgtwn_openstack-operators(e452d8aa-f7b0-4bbe-9ee2-0f54854b0bad): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 10 13:10:29 crc kubenswrapper[4921]: E1210 13:10:29.552445 4921 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/cinder-operator@sha256:981b6a8f95934a86c5f10ef6e198b07265aeba7f11cf84b9ccd13dfaf06f3ca3" Dec 10 13:10:29 crc kubenswrapper[4921]: E1210 13:10:29.552601 4921 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/cinder-operator@sha256:981b6a8f95934a86c5f10ef6e198b07265aeba7f11cf84b9ccd13dfaf06f3ca3,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-j5rhs,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-operator-controller-manager-6c677c69b-f8zhr_openstack-operators(488ba5bf-b48b-42f3-ba24-eba12c38a5cb): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 10 13:10:29 crc kubenswrapper[4921]: I1210 13:10:29.951768 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-84b575879fkj985"] Dec 10 13:10:30 crc kubenswrapper[4921]: E1210 13:10:30.166497 4921 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/swift-operator@sha256:3aa109bb973253ae9dcf339b9b65abbd1176cdb4be672c93e538a5f113816991" Dec 10 13:10:30 crc kubenswrapper[4921]: E1210 13:10:30.166683 4921 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/swift-operator@sha256:3aa109bb973253ae9dcf339b9b65abbd1176cdb4be672c93e538a5f113816991,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-stsh4,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod swift-operator-controller-manager-9d58d64bc-8879b_openstack-operators(5c3dc67e-e2a4-426f-b365-d325af35b1b6): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 10 13:10:31 crc kubenswrapper[4921]: I1210 13:10:31.928838 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/597fc39b-b10d-4b12-af6e-ce35966daa75-webhook-certs\") pod \"openstack-operator-controller-manager-7bf4f7754c-bzqjz\" (UID: \"597fc39b-b10d-4b12-af6e-ce35966daa75\") " pod="openstack-operators/openstack-operator-controller-manager-7bf4f7754c-bzqjz" Dec 10 13:10:31 crc kubenswrapper[4921]: I1210 13:10:31.939979 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/597fc39b-b10d-4b12-af6e-ce35966daa75-webhook-certs\") pod \"openstack-operator-controller-manager-7bf4f7754c-bzqjz\" (UID: \"597fc39b-b10d-4b12-af6e-ce35966daa75\") " pod="openstack-operators/openstack-operator-controller-manager-7bf4f7754c-bzqjz" Dec 10 13:10:32 crc kubenswrapper[4921]: I1210 13:10:32.132543 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-7bf4f7754c-bzqjz" Dec 10 13:10:34 crc kubenswrapper[4921]: E1210 13:10:34.669252 4921 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/test-operator@sha256:101b3e007d8c9f2e183262d7712f986ad51256448099069bc14f1ea5f997ab94" Dec 10 13:10:34 crc kubenswrapper[4921]: E1210 13:10:34.669793 4921 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/test-operator@sha256:101b3e007d8c9f2e183262d7712f986ad51256448099069bc14f1ea5f997ab94,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-h9dk4,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-5854674fcc-jkgbt_openstack-operators(532d1487-1112-4341-a787-1981d7093054): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 10 13:10:35 crc kubenswrapper[4921]: E1210 13:10:35.272564 4921 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/keystone-operator@sha256:72ad6517987f674af0d0ae092cbb874aeae909c8b8b60188099c311762ebc8f7" Dec 10 13:10:35 crc kubenswrapper[4921]: E1210 13:10:35.272767 4921 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:manager,Image:quay.io/openstack-k8s-operators/keystone-operator@sha256:72ad6517987f674af0d0ae092cbb874aeae909c8b8b60188099c311762ebc8f7,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-ssghd,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod keystone-operator-controller-manager-7765d96ddf-mf85l_openstack-operators(83057698-5071-4487-9ed1-3649fe298d00): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 10 13:10:35 crc kubenswrapper[4921]: E1210 13:10:35.817074 4921 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/telemetry-operator@sha256:f27e732ec1faee765461bf137d9be81278b2fa39675019a73622755e1e610b6f" Dec 10 13:10:35 crc kubenswrapper[4921]: E1210 13:10:35.817996 4921 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/telemetry-operator@sha256:f27e732ec1faee765461bf137d9be81278b2fa39675019a73622755e1e610b6f,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 
--metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-tfq9t,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-58d5ff84df-p7nv4_openstack-operators(c1a2fb9e-5927-4ebf-a3f1-a13564f7c26e): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 10 13:10:38 crc kubenswrapper[4921]: E1210 13:10:38.408558 4921 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/octavia-operator@sha256:d9a3694865a7d54ee96397add18c3898886e98d079aa20876a0f4de1fa7a7168" Dec 10 13:10:38 crc kubenswrapper[4921]: E1210 13:10:38.408754 4921 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/octavia-operator@sha256:d9a3694865a7d54ee96397add18c3898886e98d079aa20876a0f4de1fa7a7168,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-5nmbx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod octavia-operator-controller-manager-998648c74-8zlrp_openstack-operators(c261f893-dc59-43ba-8a28-09528971bfb1): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 10 13:10:38 crc kubenswrapper[4921]: E1210 13:10:38.824106 4921 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/watcher-operator@sha256:961417d59f527d925ac48ff6a11de747d0493315e496e34dc83d76a1a1fff58a" Dec 10 13:10:38 crc kubenswrapper[4921]: E1210 13:10:38.824275 4921 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/watcher-operator@sha256:961417d59f527d925ac48ff6a11de747d0493315e496e34dc83d76a1a1fff58a,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-sfztm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-75944c9b7-x4zn4_openstack-operators(d838a4f0-a117-4807-aca0-8bc00b6ad6f1): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 10 13:10:39 crc kubenswrapper[4921]: E1210 13:10:39.253597 4921 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2" Dec 10 13:10:39 crc kubenswrapper[4921]: E1210 13:10:39.253780 4921 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-mfg85,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-668c99d594-h76ts_openstack-operators(0c17ea5f-4baf-427a-a2ba-106bb7248194): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Dec 10 13:10:39 crc kubenswrapper[4921]: E1210 13:10:39.254966 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-h76ts" podUID="0c17ea5f-4baf-427a-a2ba-106bb7248194"
Dec 10 13:10:39 crc kubenswrapper[4921]: I1210 13:10:39.658855 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-78d48bff9d-7s9mj"]
Dec 10 13:10:39 crc kubenswrapper[4921]: I1210 13:10:39.752954 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-l2jrj"]
Dec 10 13:10:39 crc kubenswrapper[4921]: W1210 13:10:39.978600 4921 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc4b5934f_4fde_47aa_a14e_fdb6f5fe7af1.slice/crio-b07f73a4b7ca0587ae0ddc74ab9d34e3a0a073d1cde4d85a71229ed741af8d17 WatchSource:0}: Error finding container b07f73a4b7ca0587ae0ddc74ab9d34e3a0a073d1cde4d85a71229ed741af8d17: Status 404 returned error can't find the container with id b07f73a4b7ca0587ae0ddc74ab9d34e3a0a073d1cde4d85a71229ed741af8d17
Dec 10 13:10:39 crc kubenswrapper[4921]: W1210 13:10:39.983654 4921 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode4eeb986_72ad_4c59_a947_b34fd3dd101b.slice/crio-9df590ce342126d4db29ba6ef0c47a418f1cfb34661deba57cf9768c3150b89b WatchSource:0}: Error finding container 9df590ce342126d4db29ba6ef0c47a418f1cfb34661deba57cf9768c3150b89b: Status 404 returned error can't find the container with id 9df590ce342126d4db29ba6ef0c47a418f1cfb34661deba57cf9768c3150b89b
Dec 10 13:10:39 crc kubenswrapper[4921]: I1210 13:10:39.993028 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-7bf4f7754c-bzqjz"]
Dec 10 13:10:40 crc kubenswrapper[4921]: I1210 13:10:40.175431 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-l2jrj" event={"ID":"e4eeb986-72ad-4c59-a947-b34fd3dd101b","Type":"ContainerStarted","Data":"9df590ce342126d4db29ba6ef0c47a418f1cfb34661deba57cf9768c3150b89b"}
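
The ErrImagePull entries above are transient: the image copy was canceled mid-pull, and the kubelet puts each failing image behind an exponential backoff, which is why the same pods reappear below with ImagePullBackOff ("Back-off pulling image ...") before eventually starting once a pull completes. A minimal sketch of that doubling-with-cap retry policy, assuming the upstream kubelet defaults of a 10s base delay and a 5m ceiling (illustrative Go, not kubelet source):

package main

import (
	"fmt"
	"time"
)

// nextImagePullDelay returns how long a kubelet-style backoff would wait
// before retry attempt n (1-based): the delay doubles per consecutive
// failure and is clamped at maxDelay. The 10s/5m values mirror the
// upstream kubelet defaults; this helper is a sketch, not kubelet code.
func nextImagePullDelay(n int) time.Duration {
	const (
		baseDelay = 10 * time.Second
		maxDelay  = 5 * time.Minute
	)
	d := baseDelay
	for i := 1; i < n; i++ {
		d *= 2
		if d >= maxDelay {
			return maxDelay
		}
	}
	return d
}

func main() {
	for n := 1; n <= 7; n++ {
		fmt.Printf("failure %d -> wait %v\n", n, nextImagePullDelay(n))
	}
	// failure 1 -> wait 10s, 2 -> 20s, 3 -> 40s, ... failure 6+ capped at 5m0s
}
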
Dec 10 13:10:40 crc kubenswrapper[4921]: I1210 13:10:40.179379 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-5697bb5779-xqfkp" event={"ID":"32c622c7-cf8b-4b25-836e-c13f5c35dde1","Type":"ContainerStarted","Data":"3c53665463f7abc20f0b555f463aed9a200fa272b27910ce4d0f442eab0eab87"}
Dec 10 13:10:40 crc kubenswrapper[4921]: I1210 13:10:40.181352 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-xzrb8" event={"ID":"22100473-2036-47d3-846e-b9e351b7d7e1","Type":"ContainerStarted","Data":"5ecc2ce48f3bf92f74f5c7abf32801b901e7a2368b4afed795a356f46e8be2d8"}
Dec 10 13:10:40 crc kubenswrapper[4921]: I1210 13:10:40.194421 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-hbpss" event={"ID":"cdc476bb-a407-403e-9bbe-e2f62e0ce23b","Type":"ContainerStarted","Data":"530e7ae0d2e6f8cd048b0e8c737d2fd519c0200a6d25c61c9e74cf378142d1c2"}
Dec 10 13:10:40 crc kubenswrapper[4921]: I1210 13:10:40.196091 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879fkj985" event={"ID":"feac9e56-73ad-4870-8306-0789acfe8a8f","Type":"ContainerStarted","Data":"0ebb8c8d3f9d34d6173a72c1ed54b18a14553b928049ad4d4142110363994faf"}
Dec 10 13:10:40 crc kubenswrapper[4921]: I1210 13:10:40.197810 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-7s9mj" event={"ID":"c4b5934f-4fde-47aa-a14e-fdb6f5fe7af1","Type":"ContainerStarted","Data":"b07f73a4b7ca0587ae0ddc74ab9d34e3a0a073d1cde4d85a71229ed741af8d17"}
Dec 10 13:10:40 crc kubenswrapper[4921]: E1210 13:10:40.198708 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-h76ts" podUID="0c17ea5f-4baf-427a-a2ba-106bb7248194"
Dec 10 13:10:42 crc kubenswrapper[4921]: I1210 13:10:42.221836 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-7bf4f7754c-bzqjz" event={"ID":"597fc39b-b10d-4b12-af6e-ce35966daa75","Type":"ContainerStarted","Data":"73d9cfa29f7e251527166f50b4e4de7445db7e2e53e4e0989ed6a2f9f0cce3ae"}
Dec 10 13:10:43 crc kubenswrapper[4921]: I1210 13:10:43.229927 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-7bf4f7754c-bzqjz" event={"ID":"597fc39b-b10d-4b12-af6e-ce35966daa75","Type":"ContainerStarted","Data":"b2b4f7531eab30ab153c2dc0ca46f2c6ddffcc43ba5d49d6c61f0b39197a9f3e"}
Dec 10 13:10:43 crc kubenswrapper[4921]: I1210 13:10:43.230737 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-manager-7bf4f7754c-bzqjz"
Dec 10 13:10:43 crc kubenswrapper[4921]: I1210 13:10:43.237793 4921 generic.go:334] "Generic (PLEG): container finished" podID="e4eeb986-72ad-4c59-a947-b34fd3dd101b" containerID="120b22504c9fbeca2fe0a57bae078ce40d23bb6760587c32f73380f5f709e213" exitCode=0
Dec 10 13:10:43 crc kubenswrapper[4921]: I1210 13:10:43.237874 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-l2jrj"
event={"ID":"e4eeb986-72ad-4c59-a947-b34fd3dd101b","Type":"ContainerDied","Data":"120b22504c9fbeca2fe0a57bae078ce40d23bb6760587c32f73380f5f709e213"} Dec 10 13:10:43 crc kubenswrapper[4921]: I1210 13:10:43.301958 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-manager-7bf4f7754c-bzqjz" podStartSLOduration=44.301936157 podStartE2EDuration="44.301936157s" podCreationTimestamp="2025-12-10 13:09:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 13:10:43.293321906 +0000 UTC m=+840.509543850" watchObservedRunningTime="2025-12-10 13:10:43.301936157 +0000 UTC m=+840.518158081" Dec 10 13:10:44 crc kubenswrapper[4921]: E1210 13:10:44.772094 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-mf85l" podUID="83057698-5071-4487-9ed1-3649fe298d00" Dec 10 13:10:45 crc kubenswrapper[4921]: E1210 13:10:45.006172 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/watcher-operator-controller-manager-75944c9b7-x4zn4" podUID="d838a4f0-a117-4807-aca0-8bc00b6ad6f1" Dec 10 13:10:45 crc kubenswrapper[4921]: E1210 13:10:45.109587 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/telemetry-operator-controller-manager-58d5ff84df-p7nv4" podUID="c1a2fb9e-5927-4ebf-a3f1-a13564f7c26e" Dec 10 13:10:45 crc kubenswrapper[4921]: E1210 13:10:45.111183 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/ironic-operator-controller-manager-967d97867-gxd4b" podUID="5c6aaa2e-a82b-4d95-b7fc-bfa5eee026c8" Dec 10 13:10:45 crc kubenswrapper[4921]: E1210 13:10:45.123938 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/mariadb-operator-controller-manager-79c8c4686c-94t86" podUID="a0e4b9fa-671d-45a9-93e9-078eb23843db" Dec 10 13:10:45 crc kubenswrapper[4921]: I1210 13:10:45.254297 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-5697bb5779-xqfkp" event={"ID":"32c622c7-cf8b-4b25-836e-c13f5c35dde1","Type":"ContainerStarted","Data":"cba6ea10abcdb89470524b40b9e0a02b636c6bdcb313659070fb83fa21428561"} Dec 10 13:10:45 crc kubenswrapper[4921]: I1210 13:10:45.255615 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-5697bb5779-xqfkp" Dec 10 13:10:45 crc kubenswrapper[4921]: I1210 13:10:45.260373 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-5697bb5779-xqfkp" Dec 10 13:10:45 crc kubenswrapper[4921]: I1210 13:10:45.264609 4921 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-58d5ff84df-p7nv4" event={"ID":"c1a2fb9e-5927-4ebf-a3f1-a13564f7c26e","Type":"ContainerStarted","Data":"4977c8ee9af1ea08b8ecb2c15e67eeb351f54498080ac9ba110f703fd598bba3"} Dec 10 13:10:45 crc kubenswrapper[4921]: E1210 13:10:45.266155 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:f27e732ec1faee765461bf137d9be81278b2fa39675019a73622755e1e610b6f\\\"\"" pod="openstack-operators/telemetry-operator-controller-manager-58d5ff84df-p7nv4" podUID="c1a2fb9e-5927-4ebf-a3f1-a13564f7c26e" Dec 10 13:10:45 crc kubenswrapper[4921]: I1210 13:10:45.267679 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-hbpss" event={"ID":"cdc476bb-a407-403e-9bbe-e2f62e0ce23b","Type":"ContainerStarted","Data":"4f97fd6957838a749c331fcdb39c29c5defb259dc33942ca0e8061b94c34c759"} Dec 10 13:10:45 crc kubenswrapper[4921]: I1210 13:10:45.277524 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-75944c9b7-x4zn4" event={"ID":"d838a4f0-a117-4807-aca0-8bc00b6ad6f1","Type":"ContainerStarted","Data":"221303eb0d904b3d0025b300fa1c51d7f02e3ce3c44e608b75298763e30ccda8"} Dec 10 13:10:45 crc kubenswrapper[4921]: E1210 13:10:45.286593 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:961417d59f527d925ac48ff6a11de747d0493315e496e34dc83d76a1a1fff58a\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-75944c9b7-x4zn4" podUID="d838a4f0-a117-4807-aca0-8bc00b6ad6f1" Dec 10 13:10:45 crc kubenswrapper[4921]: I1210 13:10:45.298003 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-967d97867-gxd4b" event={"ID":"5c6aaa2e-a82b-4d95-b7fc-bfa5eee026c8","Type":"ContainerStarted","Data":"6fbfbba6e675d5f1dad9fa47895ed45441e63986536cff08f080d94e46e04a6a"} Dec 10 13:10:45 crc kubenswrapper[4921]: I1210 13:10:45.305855 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-controller-manager-5697bb5779-xqfkp" podStartSLOduration=2.773058592 podStartE2EDuration="47.305837653s" podCreationTimestamp="2025-12-10 13:09:58 +0000 UTC" firstStartedPulling="2025-12-10 13:10:00.342869044 +0000 UTC m=+797.559090968" lastFinishedPulling="2025-12-10 13:10:44.875648105 +0000 UTC m=+842.091870029" observedRunningTime="2025-12-10 13:10:45.287298346 +0000 UTC m=+842.503520290" watchObservedRunningTime="2025-12-10 13:10:45.305837653 +0000 UTC m=+842.522059577" Dec 10 13:10:45 crc kubenswrapper[4921]: I1210 13:10:45.306244 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-79c8c4686c-94t86" event={"ID":"a0e4b9fa-671d-45a9-93e9-078eb23843db","Type":"ContainerStarted","Data":"19d04493e9d717f9c435c7e4faaf43a30e4a17a39e87be80d157f7de87891ccc"} Dec 10 13:10:45 crc kubenswrapper[4921]: I1210 13:10:45.318247 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-mf85l" 
event={"ID":"83057698-5071-4487-9ed1-3649fe298d00","Type":"ContainerStarted","Data":"9b441814435abc44cdc68c8330412b47e0c5b50221088aefebbbdade23e9f7c7"} Dec 10 13:10:45 crc kubenswrapper[4921]: I1210 13:10:45.329556 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/placement-operator-controller-manager-78f8948974-hbpss" podStartSLOduration=2.655962961 podStartE2EDuration="46.329259301s" podCreationTimestamp="2025-12-10 13:09:59 +0000 UTC" firstStartedPulling="2025-12-10 13:10:01.071893116 +0000 UTC m=+798.288115040" lastFinishedPulling="2025-12-10 13:10:44.745189456 +0000 UTC m=+841.961411380" observedRunningTime="2025-12-10 13:10:45.316954741 +0000 UTC m=+842.533176665" watchObservedRunningTime="2025-12-10 13:10:45.329259301 +0000 UTC m=+842.545481235" Dec 10 13:10:46 crc kubenswrapper[4921]: I1210 13:10:46.324710 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-78f8948974-hbpss" Dec 10 13:10:46 crc kubenswrapper[4921]: I1210 13:10:46.326141 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/placement-operator-controller-manager-78f8948974-hbpss" Dec 10 13:10:48 crc kubenswrapper[4921]: E1210 13:10:48.470248 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-pdm9q" podUID="fc42a438-92ea-4f71-aeaa-62d388327002" Dec 10 13:10:48 crc kubenswrapper[4921]: E1210 13:10:48.684495 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/manila-operator-controller-manager-5b5fd79c9c-q5sxl" podUID="30d38f66-817f-4412-8e03-9c55d0417ace" Dec 10 13:10:48 crc kubenswrapper[4921]: E1210 13:10:48.695888 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-2jpm7" podUID="dde58658-03d9-43dc-8fe5-4be3a607934b" Dec 10 13:10:48 crc kubenswrapper[4921]: E1210 13:10:48.758887 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-kgtwn" podUID="e452d8aa-f7b0-4bbe-9ee2-0f54854b0bad" Dec 10 13:10:48 crc kubenswrapper[4921]: E1210 13:10:48.759412 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/test-operator-controller-manager-5854674fcc-jkgbt" podUID="532d1487-1112-4341-a787-1981d7093054" Dec 10 13:10:48 crc kubenswrapper[4921]: E1210 13:10:48.796122 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-jbx46" podUID="1ac6c721-b338-46f7-943e-63f5db2bd354" Dec 10 13:10:48 
crc kubenswrapper[4921]: E1210 13:10:48.807985 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-wnpbm" podUID="4f0cbb29-bf13-4073-8c9d-28da25a1fbba" Dec 10 13:10:48 crc kubenswrapper[4921]: E1210 13:10:48.829219 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/swift-operator-controller-manager-9d58d64bc-8879b" podUID="5c3dc67e-e2a4-426f-b365-d325af35b1b6" Dec 10 13:10:48 crc kubenswrapper[4921]: E1210 13:10:48.846677 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/designate-operator-controller-manager-697fb699cf-vfzb5" podUID="b297d26d-5199-4ebf-b8ad-5ca6f5e53e86" Dec 10 13:10:48 crc kubenswrapper[4921]: E1210 13:10:48.863617 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/octavia-operator-controller-manager-998648c74-8zlrp" podUID="c261f893-dc59-43ba-8a28-09528971bfb1" Dec 10 13:10:48 crc kubenswrapper[4921]: E1210 13:10:48.887720 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/cinder-operator-controller-manager-6c677c69b-f8zhr" podUID="488ba5bf-b48b-42f3-ba24-eba12c38a5cb" Dec 10 13:10:49 crc kubenswrapper[4921]: I1210 13:10:49.356611 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879fkj985" event={"ID":"feac9e56-73ad-4870-8306-0789acfe8a8f","Type":"ContainerStarted","Data":"7a8fbb6f29298b68ddd60bde49da7857b22eebd9772660a38def416eb4830d46"} Dec 10 13:10:49 crc kubenswrapper[4921]: I1210 13:10:49.356658 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879fkj985" event={"ID":"feac9e56-73ad-4870-8306-0789acfe8a8f","Type":"ContainerStarted","Data":"6cfb51b4949daf6c503b9025fde11c00f6cadff272b26a04b590ff99d3db51d1"} Dec 10 13:10:49 crc kubenswrapper[4921]: I1210 13:10:49.359197 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-mf85l" event={"ID":"83057698-5071-4487-9ed1-3649fe298d00","Type":"ContainerStarted","Data":"73e0cc468dd091e6584bcbc2fea6ec650c6c35751db643526ab94112dd317848"} Dec 10 13:10:49 crc kubenswrapper[4921]: I1210 13:10:49.359315 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-mf85l" Dec 10 13:10:49 crc kubenswrapper[4921]: I1210 13:10:49.360581 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-2jpm7" event={"ID":"dde58658-03d9-43dc-8fe5-4be3a607934b","Type":"ContainerStarted","Data":"5476916ac83401811190cb6ccff63c0e0d08d387b2cae3d589491f21a3e52c1b"} Dec 10 13:10:49 
crc kubenswrapper[4921]: I1210 13:10:49.362940 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-6c677c69b-f8zhr" event={"ID":"488ba5bf-b48b-42f3-ba24-eba12c38a5cb","Type":"ContainerStarted","Data":"96ac676f864767950aa5149437d3318c363cecf19ded84fec86ef04ec529747d"} Dec 10 13:10:49 crc kubenswrapper[4921]: I1210 13:10:49.364721 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-kgtwn" event={"ID":"e452d8aa-f7b0-4bbe-9ee2-0f54854b0bad","Type":"ContainerStarted","Data":"f9062a514284f3fddb3272ae19218c02aadefff8ad5809440478363286a93105"} Dec 10 13:10:49 crc kubenswrapper[4921]: I1210 13:10:49.366221 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-9d58d64bc-8879b" event={"ID":"5c3dc67e-e2a4-426f-b365-d325af35b1b6","Type":"ContainerStarted","Data":"9952608d582731ff0b40ee998cd7f30a08c4e6e8240bdc326ff50fc6ab08a97e"} Dec 10 13:10:49 crc kubenswrapper[4921]: E1210 13:10:49.367906 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:3aa109bb973253ae9dcf339b9b65abbd1176cdb4be672c93e538a5f113816991\\\"\"" pod="openstack-operators/swift-operator-controller-manager-9d58d64bc-8879b" podUID="5c3dc67e-e2a4-426f-b365-d325af35b1b6" Dec 10 13:10:49 crc kubenswrapper[4921]: I1210 13:10:49.375564 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-5b5fd79c9c-q5sxl" event={"ID":"30d38f66-817f-4412-8e03-9c55d0417ace","Type":"ContainerStarted","Data":"54660bf11a9de75024baea28ddd73a6c6968dfa7620887624947d817e170c53f"} Dec 10 13:10:49 crc kubenswrapper[4921]: I1210 13:10:49.381111 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-79c8c4686c-94t86" event={"ID":"a0e4b9fa-671d-45a9-93e9-078eb23843db","Type":"ContainerStarted","Data":"451161de6ff536c1570c9c96a50c91cd87e2e759d4cc30db9ad312eee43d1881"} Dec 10 13:10:49 crc kubenswrapper[4921]: I1210 13:10:49.381434 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-79c8c4686c-94t86" Dec 10 13:10:49 crc kubenswrapper[4921]: I1210 13:10:49.386433 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-7s9mj" event={"ID":"c4b5934f-4fde-47aa-a14e-fdb6f5fe7af1","Type":"ContainerStarted","Data":"bb63899c80bb8eeb089cc36e0ddb7fb9e279f5e359c045615463cda083c6bba5"} Dec 10 13:10:49 crc kubenswrapper[4921]: I1210 13:10:49.386475 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-7s9mj" event={"ID":"c4b5934f-4fde-47aa-a14e-fdb6f5fe7af1","Type":"ContainerStarted","Data":"2e34ecfd890208f0a2df830c538cd0efb6641aee72b801b9769734402ac61a73"} Dec 10 13:10:49 crc kubenswrapper[4921]: I1210 13:10:49.387042 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-7s9mj" Dec 10 13:10:49 crc kubenswrapper[4921]: I1210 13:10:49.389544 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-l2jrj" 
event={"ID":"e4eeb986-72ad-4c59-a947-b34fd3dd101b","Type":"ContainerStarted","Data":"756863913ddc17afa3ff16d9ede33c2076cdf8ee5d7a9b5ada295e22da75e80b"}
Dec 10 13:10:49 crc kubenswrapper[4921]: I1210 13:10:49.393842 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-697fb699cf-vfzb5" event={"ID":"b297d26d-5199-4ebf-b8ad-5ca6f5e53e86","Type":"ContainerStarted","Data":"383a65db9094c4b52a235a92b94162c24e95eae10ca4e9d4abf037e7f409f02e"}
Dec 10 13:10:49 crc kubenswrapper[4921]: I1210 13:10:49.394734 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879fkj985" podStartSLOduration=41.72113555 podStartE2EDuration="50.394720006s" podCreationTimestamp="2025-12-10 13:09:59 +0000 UTC" firstStartedPulling="2025-12-10 13:10:39.240940639 +0000 UTC m=+836.457162663" lastFinishedPulling="2025-12-10 13:10:47.914525195 +0000 UTC m=+845.130747119" observedRunningTime="2025-12-10 13:10:49.394079189 +0000 UTC m=+846.610301113" watchObservedRunningTime="2025-12-10 13:10:49.394720006 +0000 UTC m=+846.610941930"
Dec 10 13:10:49 crc kubenswrapper[4921]: I1210 13:10:49.399936 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-wnpbm" event={"ID":"4f0cbb29-bf13-4073-8c9d-28da25a1fbba","Type":"ContainerStarted","Data":"1a7519cfc8f99552ecc8e9a98cc2476adb379d5771ca85239f77a8eb4e335dc9"}
Dec 10 13:10:49 crc kubenswrapper[4921]: I1210 13:10:49.405255 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-jkgbt" event={"ID":"532d1487-1112-4341-a787-1981d7093054","Type":"ContainerStarted","Data":"5b23ff67ca4ad0c5dec2c278db682a32fcccb13f49ed4cd14bbb40ba5e640924"}
Dec 10 13:10:49 crc kubenswrapper[4921]: E1210 13:10:49.409800 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:101b3e007d8c9f2e183262d7712f986ad51256448099069bc14f1ea5f997ab94\\\"\"" pod="openstack-operators/test-operator-controller-manager-5854674fcc-jkgbt" podUID="532d1487-1112-4341-a787-1981d7093054"
Dec 10 13:10:49 crc kubenswrapper[4921]: I1210 13:10:49.415472 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-jbx46" event={"ID":"1ac6c721-b338-46f7-943e-63f5db2bd354","Type":"ContainerStarted","Data":"720501a495265f5223b40bf18c06fcd2e204db6db23b16e76cb7ca50810eb287"}
Dec 10 13:10:49 crc kubenswrapper[4921]: I1210 13:10:49.422314 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-pdm9q" event={"ID":"fc42a438-92ea-4f71-aeaa-62d388327002","Type":"ContainerStarted","Data":"3172363fc1faca9a90e4208c461f0f00120175ee02fd6c27c24072fe5b5b173e"}
Dec 10 13:10:49 crc kubenswrapper[4921]: I1210 13:10:49.434455 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-967d97867-gxd4b" event={"ID":"5c6aaa2e-a82b-4d95-b7fc-bfa5eee026c8","Type":"ContainerStarted","Data":"2259d6921c43912e3bb9ec8050f455f5eb0fbb11359b62c333cb7b9d5f66f80c"}
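
The "SyncLoop (PLEG): event for pod" entries above come from the kubelet's Pod Lifecycle Event Generator, which relists container state from the runtime (CRI-O here) and emits one event per observed transition; the sync loop then reconciles the affected pod. Each logged event carries the pod UID, the transition type, and the container or sandbox ID. A lookalike of that event shape, sketched from the logged fields rather than copied from the kubelet's own pleg package (values taken from the ironic-operator entry above):

package main

import "fmt"

// podLifecycleEvent mirrors the event={"ID":...,"Type":...,"Data":...}
// payloads in this log: pod UID, observed transition, and the container
// ID it applies to. Illustrative only; not the kubelet's actual type.
type podLifecycleEvent struct {
	ID   string // pod UID
	Type string // "ContainerStarted", "ContainerDied", ...
	Data string // container or sandbox ID
}

func main() {
	ev := podLifecycleEvent{
		ID:   "5c6aaa2e-a82b-4d95-b7fc-bfa5eee026c8",
		Type: "ContainerStarted",
		Data: "2259d6921c43912e3bb9ec8050f455f5eb0fbb11359b62c333cb7b9d5f66f80c",
	}
	fmt.Printf("SyncLoop (PLEG): event for pod event=%+v\n", ev)
}
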
Dec 10 13:10:49 crc kubenswrapper[4921]: I1210 13:10:49.434817 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ironic-operator-controller-manager-967d97867-gxd4b"
Dec 10 13:10:49 crc kubenswrapper[4921]: I1210 13:10:49.438632 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-xzrb8" event={"ID":"22100473-2036-47d3-846e-b9e351b7d7e1","Type":"ContainerStarted","Data":"aa8d8a806633bb56c2fff241aa60d9f5f15d915d7ec0487fc270ac39d2a1b378"}
Dec 10 13:10:49 crc kubenswrapper[4921]: I1210 13:10:49.439455 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-xzrb8"
Dec 10 13:10:49 crc kubenswrapper[4921]: I1210 13:10:49.441665 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-8zlrp" event={"ID":"c261f893-dc59-43ba-8a28-09528971bfb1","Type":"ContainerStarted","Data":"f20cff2fdb3240eaecb126ad23d2e21fa95395258fa2e56a45590a03b05270be"}
Dec 10 13:10:49 crc kubenswrapper[4921]: I1210 13:10:49.442535 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-xzrb8"
Dec 10 13:10:49 crc kubenswrapper[4921]: E1210 13:10:49.443068 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/octavia-operator@sha256:d9a3694865a7d54ee96397add18c3898886e98d079aa20876a0f4de1fa7a7168\\\"\"" pod="openstack-operators/octavia-operator-controller-manager-998648c74-8zlrp" podUID="c261f893-dc59-43ba-8a28-09528971bfb1"
Dec 10 13:10:49 crc kubenswrapper[4921]: I1210 13:10:49.443234 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-79c8c4686c-94t86" podStartSLOduration=3.499037623 podStartE2EDuration="50.443209767s" podCreationTimestamp="2025-12-10 13:09:59 +0000 UTC" firstStartedPulling="2025-12-10 13:10:01.059450732 +0000 UTC m=+798.275672656" lastFinishedPulling="2025-12-10 13:10:48.003622886 +0000 UTC m=+845.219844800" observedRunningTime="2025-12-10 13:10:49.438067189 +0000 UTC m=+846.654289123" watchObservedRunningTime="2025-12-10 13:10:49.443209767 +0000 UTC m=+846.659431721"
Dec 10 13:10:49 crc kubenswrapper[4921]: I1210 13:10:49.568276 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-7s9mj" podStartSLOduration=43.636244895 podStartE2EDuration="51.568260823s" podCreationTimestamp="2025-12-10 13:09:58 +0000 UTC" firstStartedPulling="2025-12-10 13:10:39.981441319 +0000 UTC m=+837.197663243" lastFinishedPulling="2025-12-10 13:10:47.913457247 +0000 UTC m=+845.129679171" observedRunningTime="2025-12-10 13:10:49.496303582 +0000 UTC m=+846.712525506" watchObservedRunningTime="2025-12-10 13:10:49.568260823 +0000 UTC m=+846.784482747"
Dec 10 13:10:49 crc kubenswrapper[4921]: I1210 13:10:49.700512 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-mf85l" podStartSLOduration=3.602610396 podStartE2EDuration="50.700498052s" podCreationTimestamp="2025-12-10 13:09:59 +0000 UTC" firstStartedPulling="2025-12-10 13:10:00.865304815 +0000 UTC m=+798.081526739" lastFinishedPulling="2025-12-10 13:10:47.963192471 +0000 UTC m=+845.179414395" observedRunningTime="2025-12-10 13:10:49.666111849 +0000 UTC
m=+846.882333773" watchObservedRunningTime="2025-12-10 13:10:49.700498052 +0000 UTC m=+846.916719976" Dec 10 13:10:49 crc kubenswrapper[4921]: I1210 13:10:49.986772 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ironic-operator-controller-manager-967d97867-gxd4b" podStartSLOduration=4.637844194 podStartE2EDuration="51.986756043s" podCreationTimestamp="2025-12-10 13:09:58 +0000 UTC" firstStartedPulling="2025-12-10 13:10:00.670064109 +0000 UTC m=+797.886286033" lastFinishedPulling="2025-12-10 13:10:48.018975958 +0000 UTC m=+845.235197882" observedRunningTime="2025-12-10 13:10:49.938790426 +0000 UTC m=+847.155012370" watchObservedRunningTime="2025-12-10 13:10:49.986756043 +0000 UTC m=+847.202977967" Dec 10 13:10:50 crc kubenswrapper[4921]: I1210 13:10:50.449621 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879fkj985" Dec 10 13:10:51 crc kubenswrapper[4921]: I1210 13:10:51.455489 4921 generic.go:334] "Generic (PLEG): container finished" podID="e4eeb986-72ad-4c59-a947-b34fd3dd101b" containerID="756863913ddc17afa3ff16d9ede33c2076cdf8ee5d7a9b5ada295e22da75e80b" exitCode=0 Dec 10 13:10:51 crc kubenswrapper[4921]: I1210 13:10:51.456286 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-l2jrj" event={"ID":"e4eeb986-72ad-4c59-a947-b34fd3dd101b","Type":"ContainerDied","Data":"756863913ddc17afa3ff16d9ede33c2076cdf8ee5d7a9b5ada295e22da75e80b"} Dec 10 13:10:51 crc kubenswrapper[4921]: I1210 13:10:51.476446 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-xzrb8" podStartSLOduration=5.437247736 podStartE2EDuration="52.476424167s" podCreationTimestamp="2025-12-10 13:09:59 +0000 UTC" firstStartedPulling="2025-12-10 13:10:00.876864705 +0000 UTC m=+798.093086629" lastFinishedPulling="2025-12-10 13:10:47.916041136 +0000 UTC m=+845.132263060" observedRunningTime="2025-12-10 13:10:50.050401881 +0000 UTC m=+847.266623805" watchObservedRunningTime="2025-12-10 13:10:51.476424167 +0000 UTC m=+848.692646111" Dec 10 13:10:52 crc kubenswrapper[4921]: I1210 13:10:52.144097 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-manager-7bf4f7754c-bzqjz" Dec 10 13:10:52 crc kubenswrapper[4921]: I1210 13:10:52.463481 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-jbx46" event={"ID":"1ac6c721-b338-46f7-943e-63f5db2bd354","Type":"ContainerStarted","Data":"ed4f78920442c7051ec59e7473f7b2ca0ca72eacebc311494a6840352cca13f7"} Dec 10 13:10:52 crc kubenswrapper[4921]: I1210 13:10:52.465308 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-jbx46" Dec 10 13:10:52 crc kubenswrapper[4921]: I1210 13:10:52.467320 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-kgtwn" event={"ID":"e452d8aa-f7b0-4bbe-9ee2-0f54854b0bad","Type":"ContainerStarted","Data":"36c74e2ca4278c3a625b50a5428473409cf7e15fac94fbcc4595012b0ac2125f"} Dec 10 13:10:52 crc kubenswrapper[4921]: I1210 13:10:52.467921 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-kgtwn" Dec 10 13:10:52 
crc kubenswrapper[4921]: I1210 13:10:52.469146 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-5b5fd79c9c-q5sxl" event={"ID":"30d38f66-817f-4412-8e03-9c55d0417ace","Type":"ContainerStarted","Data":"7e76569312fb09390f71a35571e3a274f8202046f49c3bed6bf2f95d6c67e83b"} Dec 10 13:10:52 crc kubenswrapper[4921]: I1210 13:10:52.469350 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-5b5fd79c9c-q5sxl" Dec 10 13:10:52 crc kubenswrapper[4921]: I1210 13:10:52.471215 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-2jpm7" event={"ID":"dde58658-03d9-43dc-8fe5-4be3a607934b","Type":"ContainerStarted","Data":"2f45b2f720c4ed37188e8a09393c80a705f971393d57d185ff5f27a6a008c107"} Dec 10 13:10:52 crc kubenswrapper[4921]: I1210 13:10:52.471603 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-2jpm7" Dec 10 13:10:52 crc kubenswrapper[4921]: I1210 13:10:52.473071 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-6c677c69b-f8zhr" event={"ID":"488ba5bf-b48b-42f3-ba24-eba12c38a5cb","Type":"ContainerStarted","Data":"0a850f1ff349c89355d7ff0db62b7f5d64b02d27eb082d33932d149e8d8c4fa6"} Dec 10 13:10:52 crc kubenswrapper[4921]: I1210 13:10:52.473449 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-6c677c69b-f8zhr" Dec 10 13:10:52 crc kubenswrapper[4921]: I1210 13:10:52.475063 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-697fb699cf-vfzb5" event={"ID":"b297d26d-5199-4ebf-b8ad-5ca6f5e53e86","Type":"ContainerStarted","Data":"94d7bf8f8c14ffd0bd29c48c89fa1d252cc7839e56bcd85380dd625a193aa57f"} Dec 10 13:10:52 crc kubenswrapper[4921]: I1210 13:10:52.475278 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-697fb699cf-vfzb5" Dec 10 13:10:52 crc kubenswrapper[4921]: I1210 13:10:52.476961 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-pdm9q" event={"ID":"fc42a438-92ea-4f71-aeaa-62d388327002","Type":"ContainerStarted","Data":"aa6b1344985843ceb811367246821e744dae3bc1238378fd95505f1db9c619d2"} Dec 10 13:10:52 crc kubenswrapper[4921]: I1210 13:10:52.477088 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-pdm9q" Dec 10 13:10:52 crc kubenswrapper[4921]: I1210 13:10:52.478958 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-wnpbm" event={"ID":"4f0cbb29-bf13-4073-8c9d-28da25a1fbba","Type":"ContainerStarted","Data":"aefa989e5ce9bd9d1773d0a0245c33e5d7e25d34ac5dfb02ea6a18c0079b9ead"} Dec 10 13:10:52 crc kubenswrapper[4921]: I1210 13:10:52.479122 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-wnpbm" Dec 10 13:10:52 crc kubenswrapper[4921]: I1210 13:10:52.493119 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-jbx46" 
podStartSLOduration=4.155900242 podStartE2EDuration="54.493102689s" podCreationTimestamp="2025-12-10 13:09:58 +0000 UTC" firstStartedPulling="2025-12-10 13:10:00.637592669 +0000 UTC m=+797.853814593" lastFinishedPulling="2025-12-10 13:10:50.974795116 +0000 UTC m=+848.191017040" observedRunningTime="2025-12-10 13:10:52.488989449 +0000 UTC m=+849.705211373" watchObservedRunningTime="2025-12-10 13:10:52.493102689 +0000 UTC m=+849.709324613"
Dec 10 13:10:52 crc kubenswrapper[4921]: I1210 13:10:52.518669 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/cinder-operator-controller-manager-6c677c69b-f8zhr" podStartSLOduration=3.606118097 podStartE2EDuration="54.518655265s" podCreationTimestamp="2025-12-10 13:09:58 +0000 UTC" firstStartedPulling="2025-12-10 13:10:00.062601107 +0000 UTC m=+797.278823031" lastFinishedPulling="2025-12-10 13:10:50.975138275 +0000 UTC m=+848.191360199" observedRunningTime="2025-12-10 13:10:52.514684098 +0000 UTC m=+849.730906022" watchObservedRunningTime="2025-12-10 13:10:52.518655265 +0000 UTC m=+849.734877179"
Dec 10 13:10:52 crc kubenswrapper[4921]: I1210 13:10:52.532345 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-wnpbm" podStartSLOduration=4.184803158 podStartE2EDuration="54.532327712s" podCreationTimestamp="2025-12-10 13:09:58 +0000 UTC" firstStartedPulling="2025-12-10 13:10:00.627042506 +0000 UTC m=+797.843264430" lastFinishedPulling="2025-12-10 13:10:50.97456706 +0000 UTC m=+848.190788984" observedRunningTime="2025-12-10 13:10:52.528663573 +0000 UTC m=+849.744885517" watchObservedRunningTime="2025-12-10 13:10:52.532327712 +0000 UTC m=+849.748549626"
Dec 10 13:10:52 crc kubenswrapper[4921]: I1210 13:10:52.572745 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-5b5fd79c9c-q5sxl" podStartSLOduration=3.492731456 podStartE2EDuration="53.572729426s" podCreationTimestamp="2025-12-10 13:09:59 +0000 UTC" firstStartedPulling="2025-12-10 13:10:00.894161409 +0000 UTC m=+798.110383323" lastFinishedPulling="2025-12-10 13:10:50.974159379 +0000 UTC m=+848.190381293" observedRunningTime="2025-12-10 13:10:52.545776673 +0000 UTC m=+849.761998617" watchObservedRunningTime="2025-12-10 13:10:52.572729426 +0000 UTC m=+849.788951350"
Dec 10 13:10:52 crc kubenswrapper[4921]: I1210 13:10:52.575949 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-kgtwn" podStartSLOduration=3.703456019 podStartE2EDuration="53.575936392s" podCreationTimestamp="2025-12-10 13:09:59 +0000 UTC" firstStartedPulling="2025-12-10 13:10:01.028079991 +0000 UTC m=+798.244301915" lastFinishedPulling="2025-12-10 13:10:50.900560364 +0000 UTC m=+848.116782288" observedRunningTime="2025-12-10 13:10:52.569560771 +0000 UTC m=+849.785782685" watchObservedRunningTime="2025-12-10 13:10:52.575936392 +0000 UTC m=+849.792158336"
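
The pod_startup_latency_tracker entries are internally consistent: podStartE2EDuration is watchObservedRunningTime minus podCreationTimestamp, and podStartSLOduration is that E2E figure minus the image-pull window (lastFinishedPulling minus firstStartedPulling), matching the pod-startup SLI, which excludes pull time. For the heat-operator entry above: 54.493102689s - (13:10:50.974795116 - 13:10:00.637592669) = 54.493102689 - 50.337202447 = 4.155900242s, exactly the logged podStartSLOduration. A short Go sketch that re-derives this from the logged timestamps (values copied from that entry; the layout string matches the format the kubelet prints):

package main

import (
	"fmt"
	"time"
)

// Re-derive the heat-operator podStartSLOduration from the timestamps
// logged above.
func main() {
	const layout = "2006-01-02 15:04:05.999999999 -0700 MST"
	parse := func(s string) time.Time {
		t, err := time.Parse(layout, s)
		if err != nil {
			panic(err)
		}
		return t
	}
	created := parse("2025-12-10 13:09:58 +0000 UTC")
	firstPull := parse("2025-12-10 13:10:00.637592669 +0000 UTC")
	lastPull := parse("2025-12-10 13:10:50.974795116 +0000 UTC")
	observed := parse("2025-12-10 13:10:52.493102689 +0000 UTC")

	e2e := observed.Sub(created)         // podStartE2EDuration: 54.493102689s
	slo := e2e - lastPull.Sub(firstPull) // minus the pull window: 4.155900242s
	fmt.Println(e2e, slo)
}
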
Dec 10 13:10:52 crc kubenswrapper[4921]: I1210 13:10:52.594081 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-2jpm7" podStartSLOduration=4.323176107 podStartE2EDuration="54.594065188s" podCreationTimestamp="2025-12-10 13:09:58 +0000 UTC" firstStartedPulling="2025-12-10 13:10:00.84534542 +0000 UTC m=+798.061567344" lastFinishedPulling="2025-12-10 13:10:51.116234501 +0000 UTC m=+848.332456425" observedRunningTime="2025-12-10 13:10:52.591451638 +0000 UTC m=+849.807673562" watchObservedRunningTime="2025-12-10 13:10:52.594065188 +0000 UTC m=+849.810287112"
Dec 10 13:10:52 crc kubenswrapper[4921]: I1210 13:10:52.636269 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-pdm9q" podStartSLOduration=2.662727334 podStartE2EDuration="53.636253281s" podCreationTimestamp="2025-12-10 13:09:59 +0000 UTC" firstStartedPulling="2025-12-10 13:10:00.865794028 +0000 UTC m=+798.082015952" lastFinishedPulling="2025-12-10 13:10:51.839319975 +0000 UTC m=+849.055541899" observedRunningTime="2025-12-10 13:10:52.631715909 +0000 UTC m=+849.847937843" watchObservedRunningTime="2025-12-10 13:10:52.636253281 +0000 UTC m=+849.852475205"
Dec 10 13:10:52 crc kubenswrapper[4921]: I1210 13:10:52.658661 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/designate-operator-controller-manager-697fb699cf-vfzb5" podStartSLOduration=3.825430289 podStartE2EDuration="54.658643281s" podCreationTimestamp="2025-12-10 13:09:58 +0000 UTC" firstStartedPulling="2025-12-10 13:10:00.886934715 +0000 UTC m=+798.103156639" lastFinishedPulling="2025-12-10 13:10:51.720147707 +0000 UTC m=+848.936369631" observedRunningTime="2025-12-10 13:10:52.655451106 +0000 UTC m=+849.871673030" watchObservedRunningTime="2025-12-10 13:10:52.658643281 +0000 UTC m=+849.874865205"
Dec 10 13:10:54 crc kubenswrapper[4921]: I1210 13:10:54.506035 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-l2jrj" event={"ID":"e4eeb986-72ad-4c59-a947-b34fd3dd101b","Type":"ContainerStarted","Data":"29da06525da20183c66126a63284fdeb7fe39af9647a3dcafb36ed930c946968"}
Dec 10 13:10:54 crc kubenswrapper[4921]: I1210 13:10:54.525836 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-l2jrj" podStartSLOduration=22.648833136 podStartE2EDuration="32.525821236s" podCreationTimestamp="2025-12-10 13:10:22 +0000 UTC" firstStartedPulling="2025-12-10 13:10:44.280300277 +0000 UTC m=+841.496522201" lastFinishedPulling="2025-12-10 13:10:54.157288377 +0000 UTC m=+851.373510301" observedRunningTime="2025-12-10 13:10:54.521278634 +0000 UTC m=+851.737500558" watchObservedRunningTime="2025-12-10 13:10:54.525821236 +0000 UTC m=+851.742043160"
Dec 10 13:10:55 crc kubenswrapper[4921]: I1210 13:10:55.001831 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-7s9mj"
Dec 10 13:10:55 crc kubenswrapper[4921]: I1210 13:10:55.512317 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879fkj985"
Dec 10 13:10:56 crc kubenswrapper[4921]: I1210 13:10:56.519822 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-h76ts" event={"ID":"0c17ea5f-4baf-427a-a2ba-106bb7248194","Type":"ContainerStarted","Data":"015612b8b750c799b4dd9ec277ca8b35973951e74ab3137cd805c7b4322440ab"}
Dec 10 13:10:56 crc kubenswrapper[4921]: I1210 13:10:56.534436 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-h76ts" podStartSLOduration=2.962488322 podStartE2EDuration="57.534422536s" podCreationTimestamp="2025-12-10 13:09:59
+0000 UTC" firstStartedPulling="2025-12-10 13:10:01.25362675 +0000 UTC m=+798.469848674" lastFinishedPulling="2025-12-10 13:10:55.825560964 +0000 UTC m=+853.041782888" observedRunningTime="2025-12-10 13:10:56.532876315 +0000 UTC m=+853.749098259" watchObservedRunningTime="2025-12-10 13:10:56.534422536 +0000 UTC m=+853.750644460" Dec 10 13:10:58 crc kubenswrapper[4921]: E1210 13:10:58.194308 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:961417d59f527d925ac48ff6a11de747d0493315e496e34dc83d76a1a1fff58a\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-75944c9b7-x4zn4" podUID="d838a4f0-a117-4807-aca0-8bc00b6ad6f1" Dec 10 13:10:59 crc kubenswrapper[4921]: I1210 13:10:59.231702 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/cinder-operator-controller-manager-6c677c69b-f8zhr" Dec 10 13:10:59 crc kubenswrapper[4921]: I1210 13:10:59.324537 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-jbx46" Dec 10 13:10:59 crc kubenswrapper[4921]: I1210 13:10:59.332887 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-wnpbm" Dec 10 13:10:59 crc kubenswrapper[4921]: I1210 13:10:59.446975 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ironic-operator-controller-manager-967d97867-gxd4b" Dec 10 13:10:59 crc kubenswrapper[4921]: I1210 13:10:59.512816 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-2jpm7" Dec 10 13:10:59 crc kubenswrapper[4921]: I1210 13:10:59.573615 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/designate-operator-controller-manager-697fb699cf-vfzb5" Dec 10 13:10:59 crc kubenswrapper[4921]: I1210 13:10:59.640699 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/manila-operator-controller-manager-5b5fd79c9c-q5sxl" Dec 10 13:10:59 crc kubenswrapper[4921]: I1210 13:10:59.658024 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-mf85l" Dec 10 13:10:59 crc kubenswrapper[4921]: I1210 13:10:59.796036 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-79c8c4686c-94t86" Dec 10 13:10:59 crc kubenswrapper[4921]: I1210 13:10:59.869084 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-kgtwn" Dec 10 13:10:59 crc kubenswrapper[4921]: I1210 13:10:59.937771 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-pdm9q" Dec 10 13:11:00 crc kubenswrapper[4921]: I1210 13:11:00.546484 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-58d5ff84df-p7nv4" event={"ID":"c1a2fb9e-5927-4ebf-a3f1-a13564f7c26e","Type":"ContainerStarted","Data":"4d7087fa9afd82d2e5c9328d5f40f95212463ab8a0224b23d2d2ba5e3b1ef5d2"} Dec 10 13:11:00 crc 
kubenswrapper[4921]: I1210 13:11:00.547007 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-58d5ff84df-p7nv4" Dec 10 13:11:00 crc kubenswrapper[4921]: I1210 13:11:00.566282 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/telemetry-operator-controller-manager-58d5ff84df-p7nv4" podStartSLOduration=2.7659143090000002 podStartE2EDuration="1m1.566264339s" podCreationTimestamp="2025-12-10 13:09:59 +0000 UTC" firstStartedPulling="2025-12-10 13:10:01.133455967 +0000 UTC m=+798.349677891" lastFinishedPulling="2025-12-10 13:10:59.933805997 +0000 UTC m=+857.150027921" observedRunningTime="2025-12-10 13:11:00.560313319 +0000 UTC m=+857.776535273" watchObservedRunningTime="2025-12-10 13:11:00.566264339 +0000 UTC m=+857.782486283" Dec 10 13:11:02 crc kubenswrapper[4921]: I1210 13:11:02.563025 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-jkgbt" event={"ID":"532d1487-1112-4341-a787-1981d7093054","Type":"ContainerStarted","Data":"9983833b60fe4fbf9b15f5e87d5fd25a3eece8084f91f8cc9347884d2466a702"} Dec 10 13:11:02 crc kubenswrapper[4921]: I1210 13:11:02.564445 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/test-operator-controller-manager-5854674fcc-jkgbt" Dec 10 13:11:02 crc kubenswrapper[4921]: I1210 13:11:02.582100 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/test-operator-controller-manager-5854674fcc-jkgbt" podStartSLOduration=2.946037753 podStartE2EDuration="1m3.582083832s" podCreationTimestamp="2025-12-10 13:09:59 +0000 UTC" firstStartedPulling="2025-12-10 13:10:01.135219115 +0000 UTC m=+798.351441039" lastFinishedPulling="2025-12-10 13:11:01.771265184 +0000 UTC m=+858.987487118" observedRunningTime="2025-12-10 13:11:02.577654513 +0000 UTC m=+859.793876447" watchObservedRunningTime="2025-12-10 13:11:02.582083832 +0000 UTC m=+859.798305756" Dec 10 13:11:02 crc kubenswrapper[4921]: I1210 13:11:02.674220 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-l2jrj" Dec 10 13:11:02 crc kubenswrapper[4921]: I1210 13:11:02.674276 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-l2jrj" Dec 10 13:11:03 crc kubenswrapper[4921]: I1210 13:11:03.570617 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-8zlrp" event={"ID":"c261f893-dc59-43ba-8a28-09528971bfb1","Type":"ContainerStarted","Data":"ab7c6d2f7b3716b1a27bb69b17fb223ab103f55cc76ee8074cd8663276289439"} Dec 10 13:11:03 crc kubenswrapper[4921]: I1210 13:11:03.571018 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-998648c74-8zlrp" Dec 10 13:11:03 crc kubenswrapper[4921]: I1210 13:11:03.572440 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-9d58d64bc-8879b" event={"ID":"5c3dc67e-e2a4-426f-b365-d325af35b1b6","Type":"ContainerStarted","Data":"1f102c444e334132a051a59f687166f771cbc7866c892d337ad29ca928811885"} Dec 10 13:11:03 crc kubenswrapper[4921]: I1210 13:11:03.585896 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack-operators/octavia-operator-controller-manager-998648c74-8zlrp" podStartSLOduration=2.94964842 podStartE2EDuration="1m4.585881579s" podCreationTimestamp="2025-12-10 13:09:59 +0000 UTC" firstStartedPulling="2025-12-10 13:10:01.117570221 +0000 UTC m=+798.333792145" lastFinishedPulling="2025-12-10 13:11:02.75380338 +0000 UTC m=+859.970025304" observedRunningTime="2025-12-10 13:11:03.585228971 +0000 UTC m=+860.801450905" watchObservedRunningTime="2025-12-10 13:11:03.585881579 +0000 UTC m=+860.802103503"
Dec 10 13:11:03 crc kubenswrapper[4921]: I1210 13:11:03.603681 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-9d58d64bc-8879b" podStartSLOduration=2.889003622 podStartE2EDuration="1m4.603665796s" podCreationTimestamp="2025-12-10 13:09:59 +0000 UTC" firstStartedPulling="2025-12-10 13:10:01.118165677 +0000 UTC m=+798.334387601" lastFinishedPulling="2025-12-10 13:11:02.832827851 +0000 UTC m=+860.049049775" observedRunningTime="2025-12-10 13:11:03.602112574 +0000 UTC m=+860.818334508" watchObservedRunningTime="2025-12-10 13:11:03.603665796 +0000 UTC m=+860.819887720"
Dec 10 13:11:03 crc kubenswrapper[4921]: I1210 13:11:03.717651 4921 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-l2jrj" podUID="e4eeb986-72ad-4c59-a947-b34fd3dd101b" containerName="registry-server" probeResult="failure" output=<
Dec 10 13:11:03 crc kubenswrapper[4921]: timeout: failed to connect service ":50051" within 1s
Dec 10 13:11:03 crc kubenswrapper[4921]: >
Dec 10 13:11:09 crc kubenswrapper[4921]: I1210 13:11:09.990881 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-9d58d64bc-8879b"
Dec 10 13:11:09 crc kubenswrapper[4921]: I1210 13:11:09.992475 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-9d58d64bc-8879b"
Dec 10 13:11:10 crc kubenswrapper[4921]: I1210 13:11:10.113540 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-58d5ff84df-p7nv4"
Dec 10 13:11:10 crc kubenswrapper[4921]: I1210 13:11:10.154370 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/octavia-operator-controller-manager-998648c74-8zlrp"
Dec 10 13:11:10 crc kubenswrapper[4921]: I1210 13:11:10.182097 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/test-operator-controller-manager-5854674fcc-jkgbt"
Dec 10 13:11:12 crc kubenswrapper[4921]: I1210 13:11:12.194357 4921 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Dec 10 13:11:12 crc kubenswrapper[4921]: I1210 13:11:12.715638 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-l2jrj"
Dec 10 13:11:12 crc kubenswrapper[4921]: I1210 13:11:12.755078 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-l2jrj"
Dec 10 13:11:12 crc kubenswrapper[4921]: I1210 13:11:12.947423 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-l2jrj"]
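
The startup probe failure above (timeout: failed to connect service ":50051" within 1s) means the registry-server's gRPC endpoint was not yet accepting connections; the probe keeps failing until 13:11:12, when it flips to status="started" and the readiness probe follows. The timeout semantics amount to a bounded connection attempt against the port, roughly as below (a hedged sketch, not the actual probe binary; address and timeout taken from the log, and the real probe also runs a gRPC health check once the connection succeeds):

package main

import (
	"fmt"
	"net"
	"time"
)

// Bounded connection attempt mirroring the probe's one-second budget
// against the registry-server port. Illustrative only.
func main() {
	conn, err := net.DialTimeout("tcp", "localhost:50051", time.Second)
	if err != nil {
		fmt.Printf("timeout: failed to connect service %q within 1s: %v\n", ":50051", err)
		return
	}
	conn.Close()
	fmt.Println("probe ok")
}
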
pod="openstack-operators/watcher-operator-controller-manager-75944c9b7-x4zn4" event={"ID":"d838a4f0-a117-4807-aca0-8bc00b6ad6f1","Type":"ContainerStarted","Data":"0b35489c7a4229d9d531ca3b5e046339d29824e911bfe4c6dcb03357809c5228"} Dec 10 13:11:13 crc kubenswrapper[4921]: I1210 13:11:13.648694 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-75944c9b7-x4zn4" Dec 10 13:11:13 crc kubenswrapper[4921]: I1210 13:11:13.670097 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-controller-manager-75944c9b7-x4zn4" podStartSLOduration=2.720138099 podStartE2EDuration="1m14.670079883s" podCreationTimestamp="2025-12-10 13:09:59 +0000 UTC" firstStartedPulling="2025-12-10 13:10:01.315895821 +0000 UTC m=+798.532117745" lastFinishedPulling="2025-12-10 13:11:13.265837605 +0000 UTC m=+870.482059529" observedRunningTime="2025-12-10 13:11:13.669822576 +0000 UTC m=+870.886044500" watchObservedRunningTime="2025-12-10 13:11:13.670079883 +0000 UTC m=+870.886301807" Dec 10 13:11:14 crc kubenswrapper[4921]: I1210 13:11:14.652074 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-l2jrj" podUID="e4eeb986-72ad-4c59-a947-b34fd3dd101b" containerName="registry-server" containerID="cri-o://29da06525da20183c66126a63284fdeb7fe39af9647a3dcafb36ed930c946968" gracePeriod=2 Dec 10 13:11:15 crc kubenswrapper[4921]: I1210 13:11:15.041338 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-l2jrj" Dec 10 13:11:15 crc kubenswrapper[4921]: I1210 13:11:15.163013 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x9jqj\" (UniqueName: \"kubernetes.io/projected/e4eeb986-72ad-4c59-a947-b34fd3dd101b-kube-api-access-x9jqj\") pod \"e4eeb986-72ad-4c59-a947-b34fd3dd101b\" (UID: \"e4eeb986-72ad-4c59-a947-b34fd3dd101b\") " Dec 10 13:11:15 crc kubenswrapper[4921]: I1210 13:11:15.163312 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e4eeb986-72ad-4c59-a947-b34fd3dd101b-catalog-content\") pod \"e4eeb986-72ad-4c59-a947-b34fd3dd101b\" (UID: \"e4eeb986-72ad-4c59-a947-b34fd3dd101b\") " Dec 10 13:11:15 crc kubenswrapper[4921]: I1210 13:11:15.163378 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e4eeb986-72ad-4c59-a947-b34fd3dd101b-utilities\") pod \"e4eeb986-72ad-4c59-a947-b34fd3dd101b\" (UID: \"e4eeb986-72ad-4c59-a947-b34fd3dd101b\") " Dec 10 13:11:15 crc kubenswrapper[4921]: I1210 13:11:15.163967 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e4eeb986-72ad-4c59-a947-b34fd3dd101b-utilities" (OuterVolumeSpecName: "utilities") pod "e4eeb986-72ad-4c59-a947-b34fd3dd101b" (UID: "e4eeb986-72ad-4c59-a947-b34fd3dd101b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 13:11:15 crc kubenswrapper[4921]: I1210 13:11:15.168757 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e4eeb986-72ad-4c59-a947-b34fd3dd101b-kube-api-access-x9jqj" (OuterVolumeSpecName: "kube-api-access-x9jqj") pod "e4eeb986-72ad-4c59-a947-b34fd3dd101b" (UID: "e4eeb986-72ad-4c59-a947-b34fd3dd101b"). 
InnerVolumeSpecName "kube-api-access-x9jqj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 13:11:15 crc kubenswrapper[4921]: I1210 13:11:15.264549 4921 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e4eeb986-72ad-4c59-a947-b34fd3dd101b-utilities\") on node \"crc\" DevicePath \"\"" Dec 10 13:11:15 crc kubenswrapper[4921]: I1210 13:11:15.264587 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x9jqj\" (UniqueName: \"kubernetes.io/projected/e4eeb986-72ad-4c59-a947-b34fd3dd101b-kube-api-access-x9jqj\") on node \"crc\" DevicePath \"\"" Dec 10 13:11:15 crc kubenswrapper[4921]: I1210 13:11:15.287561 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e4eeb986-72ad-4c59-a947-b34fd3dd101b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e4eeb986-72ad-4c59-a947-b34fd3dd101b" (UID: "e4eeb986-72ad-4c59-a947-b34fd3dd101b"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 13:11:15 crc kubenswrapper[4921]: I1210 13:11:15.366209 4921 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e4eeb986-72ad-4c59-a947-b34fd3dd101b-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 10 13:11:15 crc kubenswrapper[4921]: I1210 13:11:15.659085 4921 generic.go:334] "Generic (PLEG): container finished" podID="e4eeb986-72ad-4c59-a947-b34fd3dd101b" containerID="29da06525da20183c66126a63284fdeb7fe39af9647a3dcafb36ed930c946968" exitCode=0 Dec 10 13:11:15 crc kubenswrapper[4921]: I1210 13:11:15.659127 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-l2jrj" event={"ID":"e4eeb986-72ad-4c59-a947-b34fd3dd101b","Type":"ContainerDied","Data":"29da06525da20183c66126a63284fdeb7fe39af9647a3dcafb36ed930c946968"} Dec 10 13:11:15 crc kubenswrapper[4921]: I1210 13:11:15.659152 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-l2jrj" event={"ID":"e4eeb986-72ad-4c59-a947-b34fd3dd101b","Type":"ContainerDied","Data":"9df590ce342126d4db29ba6ef0c47a418f1cfb34661deba57cf9768c3150b89b"} Dec 10 13:11:15 crc kubenswrapper[4921]: I1210 13:11:15.659169 4921 scope.go:117] "RemoveContainer" containerID="29da06525da20183c66126a63284fdeb7fe39af9647a3dcafb36ed930c946968" Dec 10 13:11:15 crc kubenswrapper[4921]: I1210 13:11:15.659195 4921 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-l2jrj" Dec 10 13:11:15 crc kubenswrapper[4921]: I1210 13:11:15.676665 4921 scope.go:117] "RemoveContainer" containerID="756863913ddc17afa3ff16d9ede33c2076cdf8ee5d7a9b5ada295e22da75e80b" Dec 10 13:11:15 crc kubenswrapper[4921]: I1210 13:11:15.693040 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-l2jrj"] Dec 10 13:11:15 crc kubenswrapper[4921]: I1210 13:11:15.698012 4921 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-l2jrj"] Dec 10 13:11:15 crc kubenswrapper[4921]: I1210 13:11:15.706454 4921 scope.go:117] "RemoveContainer" containerID="120b22504c9fbeca2fe0a57bae078ce40d23bb6760587c32f73380f5f709e213" Dec 10 13:11:15 crc kubenswrapper[4921]: I1210 13:11:15.731120 4921 scope.go:117] "RemoveContainer" containerID="29da06525da20183c66126a63284fdeb7fe39af9647a3dcafb36ed930c946968" Dec 10 13:11:15 crc kubenswrapper[4921]: E1210 13:11:15.731664 4921 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"29da06525da20183c66126a63284fdeb7fe39af9647a3dcafb36ed930c946968\": container with ID starting with 29da06525da20183c66126a63284fdeb7fe39af9647a3dcafb36ed930c946968 not found: ID does not exist" containerID="29da06525da20183c66126a63284fdeb7fe39af9647a3dcafb36ed930c946968" Dec 10 13:11:15 crc kubenswrapper[4921]: I1210 13:11:15.731718 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"29da06525da20183c66126a63284fdeb7fe39af9647a3dcafb36ed930c946968"} err="failed to get container status \"29da06525da20183c66126a63284fdeb7fe39af9647a3dcafb36ed930c946968\": rpc error: code = NotFound desc = could not find container \"29da06525da20183c66126a63284fdeb7fe39af9647a3dcafb36ed930c946968\": container with ID starting with 29da06525da20183c66126a63284fdeb7fe39af9647a3dcafb36ed930c946968 not found: ID does not exist" Dec 10 13:11:15 crc kubenswrapper[4921]: I1210 13:11:15.731756 4921 scope.go:117] "RemoveContainer" containerID="756863913ddc17afa3ff16d9ede33c2076cdf8ee5d7a9b5ada295e22da75e80b" Dec 10 13:11:15 crc kubenswrapper[4921]: E1210 13:11:15.732154 4921 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"756863913ddc17afa3ff16d9ede33c2076cdf8ee5d7a9b5ada295e22da75e80b\": container with ID starting with 756863913ddc17afa3ff16d9ede33c2076cdf8ee5d7a9b5ada295e22da75e80b not found: ID does not exist" containerID="756863913ddc17afa3ff16d9ede33c2076cdf8ee5d7a9b5ada295e22da75e80b" Dec 10 13:11:15 crc kubenswrapper[4921]: I1210 13:11:15.732197 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"756863913ddc17afa3ff16d9ede33c2076cdf8ee5d7a9b5ada295e22da75e80b"} err="failed to get container status \"756863913ddc17afa3ff16d9ede33c2076cdf8ee5d7a9b5ada295e22da75e80b\": rpc error: code = NotFound desc = could not find container \"756863913ddc17afa3ff16d9ede33c2076cdf8ee5d7a9b5ada295e22da75e80b\": container with ID starting with 756863913ddc17afa3ff16d9ede33c2076cdf8ee5d7a9b5ada295e22da75e80b not found: ID does not exist" Dec 10 13:11:15 crc kubenswrapper[4921]: I1210 13:11:15.732222 4921 scope.go:117] "RemoveContainer" containerID="120b22504c9fbeca2fe0a57bae078ce40d23bb6760587c32f73380f5f709e213" Dec 10 13:11:15 crc kubenswrapper[4921]: E1210 13:11:15.732718 4921 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"120b22504c9fbeca2fe0a57bae078ce40d23bb6760587c32f73380f5f709e213\": container with ID starting with 120b22504c9fbeca2fe0a57bae078ce40d23bb6760587c32f73380f5f709e213 not found: ID does not exist" containerID="120b22504c9fbeca2fe0a57bae078ce40d23bb6760587c32f73380f5f709e213" Dec 10 13:11:15 crc kubenswrapper[4921]: I1210 13:11:15.732746 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"120b22504c9fbeca2fe0a57bae078ce40d23bb6760587c32f73380f5f709e213"} err="failed to get container status \"120b22504c9fbeca2fe0a57bae078ce40d23bb6760587c32f73380f5f709e213\": rpc error: code = NotFound desc = could not find container \"120b22504c9fbeca2fe0a57bae078ce40d23bb6760587c32f73380f5f709e213\": container with ID starting with 120b22504c9fbeca2fe0a57bae078ce40d23bb6760587c32f73380f5f709e213 not found: ID does not exist" Dec 10 13:11:16 crc kubenswrapper[4921]: I1210 13:11:16.711197 4921 patch_prober.go:28] interesting pod/machine-config-daemon-vn2n6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 10 13:11:16 crc kubenswrapper[4921]: I1210 13:11:16.711287 4921 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" podUID="354355f7-6630-49a8-bdc5-5e875feecb7f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 10 13:11:17 crc kubenswrapper[4921]: I1210 13:11:17.202450 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e4eeb986-72ad-4c59-a947-b34fd3dd101b" path="/var/lib/kubelet/pods/e4eeb986-72ad-4c59-a947-b34fd3dd101b/volumes" Dec 10 13:11:20 crc kubenswrapper[4921]: I1210 13:11:20.263274 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-75944c9b7-x4zn4" Dec 10 13:11:29 crc kubenswrapper[4921]: I1210 13:11:29.800953 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-9ww65"] Dec 10 13:11:29 crc kubenswrapper[4921]: E1210 13:11:29.805142 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e4eeb986-72ad-4c59-a947-b34fd3dd101b" containerName="registry-server" Dec 10 13:11:29 crc kubenswrapper[4921]: I1210 13:11:29.805324 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="e4eeb986-72ad-4c59-a947-b34fd3dd101b" containerName="registry-server" Dec 10 13:11:29 crc kubenswrapper[4921]: E1210 13:11:29.805497 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e4eeb986-72ad-4c59-a947-b34fd3dd101b" containerName="extract-content" Dec 10 13:11:29 crc kubenswrapper[4921]: I1210 13:11:29.805632 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="e4eeb986-72ad-4c59-a947-b34fd3dd101b" containerName="extract-content" Dec 10 13:11:29 crc kubenswrapper[4921]: E1210 13:11:29.805762 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e4eeb986-72ad-4c59-a947-b34fd3dd101b" containerName="extract-utilities" Dec 10 13:11:29 crc kubenswrapper[4921]: I1210 13:11:29.805878 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="e4eeb986-72ad-4c59-a947-b34fd3dd101b" containerName="extract-utilities" Dec 10 13:11:29 crc kubenswrapper[4921]: 
I1210 13:11:29.806558 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="e4eeb986-72ad-4c59-a947-b34fd3dd101b" containerName="registry-server" Dec 10 13:11:29 crc kubenswrapper[4921]: I1210 13:11:29.811359 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-9ww65" Dec 10 13:11:29 crc kubenswrapper[4921]: I1210 13:11:29.815571 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-9ww65"] Dec 10 13:11:29 crc kubenswrapper[4921]: I1210 13:11:29.962676 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wtjhp\" (UniqueName: \"kubernetes.io/projected/8feb8732-25ce-4b70-ad69-bb98d4075b4b-kube-api-access-wtjhp\") pod \"community-operators-9ww65\" (UID: \"8feb8732-25ce-4b70-ad69-bb98d4075b4b\") " pod="openshift-marketplace/community-operators-9ww65" Dec 10 13:11:29 crc kubenswrapper[4921]: I1210 13:11:29.962725 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8feb8732-25ce-4b70-ad69-bb98d4075b4b-utilities\") pod \"community-operators-9ww65\" (UID: \"8feb8732-25ce-4b70-ad69-bb98d4075b4b\") " pod="openshift-marketplace/community-operators-9ww65" Dec 10 13:11:29 crc kubenswrapper[4921]: I1210 13:11:29.962792 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8feb8732-25ce-4b70-ad69-bb98d4075b4b-catalog-content\") pod \"community-operators-9ww65\" (UID: \"8feb8732-25ce-4b70-ad69-bb98d4075b4b\") " pod="openshift-marketplace/community-operators-9ww65" Dec 10 13:11:30 crc kubenswrapper[4921]: I1210 13:11:30.063536 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8feb8732-25ce-4b70-ad69-bb98d4075b4b-catalog-content\") pod \"community-operators-9ww65\" (UID: \"8feb8732-25ce-4b70-ad69-bb98d4075b4b\") " pod="openshift-marketplace/community-operators-9ww65" Dec 10 13:11:30 crc kubenswrapper[4921]: I1210 13:11:30.063603 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wtjhp\" (UniqueName: \"kubernetes.io/projected/8feb8732-25ce-4b70-ad69-bb98d4075b4b-kube-api-access-wtjhp\") pod \"community-operators-9ww65\" (UID: \"8feb8732-25ce-4b70-ad69-bb98d4075b4b\") " pod="openshift-marketplace/community-operators-9ww65" Dec 10 13:11:30 crc kubenswrapper[4921]: I1210 13:11:30.063627 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8feb8732-25ce-4b70-ad69-bb98d4075b4b-utilities\") pod \"community-operators-9ww65\" (UID: \"8feb8732-25ce-4b70-ad69-bb98d4075b4b\") " pod="openshift-marketplace/community-operators-9ww65" Dec 10 13:11:30 crc kubenswrapper[4921]: I1210 13:11:30.064432 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8feb8732-25ce-4b70-ad69-bb98d4075b4b-catalog-content\") pod \"community-operators-9ww65\" (UID: \"8feb8732-25ce-4b70-ad69-bb98d4075b4b\") " pod="openshift-marketplace/community-operators-9ww65" Dec 10 13:11:30 crc kubenswrapper[4921]: I1210 13:11:30.064933 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/8feb8732-25ce-4b70-ad69-bb98d4075b4b-utilities\") pod \"community-operators-9ww65\" (UID: \"8feb8732-25ce-4b70-ad69-bb98d4075b4b\") " pod="openshift-marketplace/community-operators-9ww65" Dec 10 13:11:30 crc kubenswrapper[4921]: I1210 13:11:30.107086 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wtjhp\" (UniqueName: \"kubernetes.io/projected/8feb8732-25ce-4b70-ad69-bb98d4075b4b-kube-api-access-wtjhp\") pod \"community-operators-9ww65\" (UID: \"8feb8732-25ce-4b70-ad69-bb98d4075b4b\") " pod="openshift-marketplace/community-operators-9ww65" Dec 10 13:11:30 crc kubenswrapper[4921]: I1210 13:11:30.132622 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-9ww65" Dec 10 13:11:30 crc kubenswrapper[4921]: I1210 13:11:30.635612 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-9ww65"] Dec 10 13:11:30 crc kubenswrapper[4921]: I1210 13:11:30.749959 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9ww65" event={"ID":"8feb8732-25ce-4b70-ad69-bb98d4075b4b","Type":"ContainerStarted","Data":"9be98dc5cd6a9c098c47f7b2ad5127a78b9c7607db1a500084d4e70f0e414e88"} Dec 10 13:11:31 crc kubenswrapper[4921]: I1210 13:11:31.757283 4921 generic.go:334] "Generic (PLEG): container finished" podID="8feb8732-25ce-4b70-ad69-bb98d4075b4b" containerID="06e26de8240564675389d7b86d45407b3093ed643afc6e9877524031d9f7d699" exitCode=0 Dec 10 13:11:31 crc kubenswrapper[4921]: I1210 13:11:31.757322 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9ww65" event={"ID":"8feb8732-25ce-4b70-ad69-bb98d4075b4b","Type":"ContainerDied","Data":"06e26de8240564675389d7b86d45407b3093ed643afc6e9877524031d9f7d699"} Dec 10 13:11:32 crc kubenswrapper[4921]: I1210 13:11:32.766685 4921 generic.go:334] "Generic (PLEG): container finished" podID="8feb8732-25ce-4b70-ad69-bb98d4075b4b" containerID="08158fe38656266a89145c894b740da2ed26f740448ec37a7d93e406a6844a80" exitCode=0 Dec 10 13:11:32 crc kubenswrapper[4921]: I1210 13:11:32.766994 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9ww65" event={"ID":"8feb8732-25ce-4b70-ad69-bb98d4075b4b","Type":"ContainerDied","Data":"08158fe38656266a89145c894b740da2ed26f740448ec37a7d93e406a6844a80"} Dec 10 13:11:33 crc kubenswrapper[4921]: I1210 13:11:33.776144 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9ww65" event={"ID":"8feb8732-25ce-4b70-ad69-bb98d4075b4b","Type":"ContainerStarted","Data":"55af304d716429adcd32e6f0022d48100a66f29329bc5972b6d96ecb065d0eb9"} Dec 10 13:11:33 crc kubenswrapper[4921]: I1210 13:11:33.797862 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-9ww65" podStartSLOduration=3.340876046 podStartE2EDuration="4.797840013s" podCreationTimestamp="2025-12-10 13:11:29 +0000 UTC" firstStartedPulling="2025-12-10 13:11:31.758621592 +0000 UTC m=+888.974843516" lastFinishedPulling="2025-12-10 13:11:33.215585559 +0000 UTC m=+890.431807483" observedRunningTime="2025-12-10 13:11:33.79401415 +0000 UTC m=+891.010236084" watchObservedRunningTime="2025-12-10 13:11:33.797840013 +0000 UTC m=+891.014061937" Dec 10 13:11:35 crc kubenswrapper[4921]: I1210 13:11:35.389566 4921 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-marketplace/certified-operators-82dmq"] Dec 10 13:11:35 crc kubenswrapper[4921]: I1210 13:11:35.391157 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-82dmq" Dec 10 13:11:35 crc kubenswrapper[4921]: I1210 13:11:35.422238 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-82dmq"] Dec 10 13:11:35 crc kubenswrapper[4921]: I1210 13:11:35.440505 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s9nb6\" (UniqueName: \"kubernetes.io/projected/3038fce5-a66d-441a-b4af-00e5afb26384-kube-api-access-s9nb6\") pod \"certified-operators-82dmq\" (UID: \"3038fce5-a66d-441a-b4af-00e5afb26384\") " pod="openshift-marketplace/certified-operators-82dmq" Dec 10 13:11:35 crc kubenswrapper[4921]: I1210 13:11:35.440569 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3038fce5-a66d-441a-b4af-00e5afb26384-utilities\") pod \"certified-operators-82dmq\" (UID: \"3038fce5-a66d-441a-b4af-00e5afb26384\") " pod="openshift-marketplace/certified-operators-82dmq" Dec 10 13:11:35 crc kubenswrapper[4921]: I1210 13:11:35.440613 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3038fce5-a66d-441a-b4af-00e5afb26384-catalog-content\") pod \"certified-operators-82dmq\" (UID: \"3038fce5-a66d-441a-b4af-00e5afb26384\") " pod="openshift-marketplace/certified-operators-82dmq" Dec 10 13:11:35 crc kubenswrapper[4921]: I1210 13:11:35.545343 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s9nb6\" (UniqueName: \"kubernetes.io/projected/3038fce5-a66d-441a-b4af-00e5afb26384-kube-api-access-s9nb6\") pod \"certified-operators-82dmq\" (UID: \"3038fce5-a66d-441a-b4af-00e5afb26384\") " pod="openshift-marketplace/certified-operators-82dmq" Dec 10 13:11:35 crc kubenswrapper[4921]: I1210 13:11:35.545426 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3038fce5-a66d-441a-b4af-00e5afb26384-utilities\") pod \"certified-operators-82dmq\" (UID: \"3038fce5-a66d-441a-b4af-00e5afb26384\") " pod="openshift-marketplace/certified-operators-82dmq" Dec 10 13:11:35 crc kubenswrapper[4921]: I1210 13:11:35.545479 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3038fce5-a66d-441a-b4af-00e5afb26384-catalog-content\") pod \"certified-operators-82dmq\" (UID: \"3038fce5-a66d-441a-b4af-00e5afb26384\") " pod="openshift-marketplace/certified-operators-82dmq" Dec 10 13:11:35 crc kubenswrapper[4921]: I1210 13:11:35.545956 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3038fce5-a66d-441a-b4af-00e5afb26384-catalog-content\") pod \"certified-operators-82dmq\" (UID: \"3038fce5-a66d-441a-b4af-00e5afb26384\") " pod="openshift-marketplace/certified-operators-82dmq" Dec 10 13:11:35 crc kubenswrapper[4921]: I1210 13:11:35.546340 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3038fce5-a66d-441a-b4af-00e5afb26384-utilities\") pod \"certified-operators-82dmq\" (UID: 
\"3038fce5-a66d-441a-b4af-00e5afb26384\") " pod="openshift-marketplace/certified-operators-82dmq" Dec 10 13:11:35 crc kubenswrapper[4921]: I1210 13:11:35.582651 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s9nb6\" (UniqueName: \"kubernetes.io/projected/3038fce5-a66d-441a-b4af-00e5afb26384-kube-api-access-s9nb6\") pod \"certified-operators-82dmq\" (UID: \"3038fce5-a66d-441a-b4af-00e5afb26384\") " pod="openshift-marketplace/certified-operators-82dmq" Dec 10 13:11:35 crc kubenswrapper[4921]: I1210 13:11:35.705423 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-82dmq" Dec 10 13:11:36 crc kubenswrapper[4921]: W1210 13:11:36.162554 4921 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3038fce5_a66d_441a_b4af_00e5afb26384.slice/crio-f6b6c553c96dc2750eee8fdd26cdd46c0d4b1e6e8a1f1fc76dc470b98169a2eb WatchSource:0}: Error finding container f6b6c553c96dc2750eee8fdd26cdd46c0d4b1e6e8a1f1fc76dc470b98169a2eb: Status 404 returned error can't find the container with id f6b6c553c96dc2750eee8fdd26cdd46c0d4b1e6e8a1f1fc76dc470b98169a2eb Dec 10 13:11:36 crc kubenswrapper[4921]: I1210 13:11:36.167775 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-82dmq"] Dec 10 13:11:36 crc kubenswrapper[4921]: I1210 13:11:36.464526 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-nn5zn"] Dec 10 13:11:36 crc kubenswrapper[4921]: I1210 13:11:36.470709 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-nn5zn" Dec 10 13:11:36 crc kubenswrapper[4921]: I1210 13:11:36.475084 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt" Dec 10 13:11:36 crc kubenswrapper[4921]: I1210 13:11:36.476724 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt" Dec 10 13:11:36 crc kubenswrapper[4921]: I1210 13:11:36.477046 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-phvzx" Dec 10 13:11:36 crc kubenswrapper[4921]: I1210 13:11:36.477408 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns" Dec 10 13:11:36 crc kubenswrapper[4921]: I1210 13:11:36.487839 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-nn5zn"] Dec 10 13:11:36 crc kubenswrapper[4921]: I1210 13:11:36.561041 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kft8n\" (UniqueName: \"kubernetes.io/projected/2fe91281-0f3f-4e50-9701-5d237060f645-kube-api-access-kft8n\") pod \"dnsmasq-dns-675f4bcbfc-nn5zn\" (UID: \"2fe91281-0f3f-4e50-9701-5d237060f645\") " pod="openstack/dnsmasq-dns-675f4bcbfc-nn5zn" Dec 10 13:11:36 crc kubenswrapper[4921]: I1210 13:11:36.561124 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2fe91281-0f3f-4e50-9701-5d237060f645-config\") pod \"dnsmasq-dns-675f4bcbfc-nn5zn\" (UID: \"2fe91281-0f3f-4e50-9701-5d237060f645\") " pod="openstack/dnsmasq-dns-675f4bcbfc-nn5zn" Dec 10 13:11:36 crc kubenswrapper[4921]: I1210 13:11:36.647268 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-rswl8"] Dec 10 
13:11:36 crc kubenswrapper[4921]: I1210 13:11:36.648417 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-rswl8" Dec 10 13:11:36 crc kubenswrapper[4921]: I1210 13:11:36.657213 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc" Dec 10 13:11:36 crc kubenswrapper[4921]: I1210 13:11:36.662626 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6d77f2dd-23bb-44c4-9f82-ef2587a0d872-config\") pod \"dnsmasq-dns-78dd6ddcc-rswl8\" (UID: \"6d77f2dd-23bb-44c4-9f82-ef2587a0d872\") " pod="openstack/dnsmasq-dns-78dd6ddcc-rswl8" Dec 10 13:11:36 crc kubenswrapper[4921]: I1210 13:11:36.662684 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2fe91281-0f3f-4e50-9701-5d237060f645-config\") pod \"dnsmasq-dns-675f4bcbfc-nn5zn\" (UID: \"2fe91281-0f3f-4e50-9701-5d237060f645\") " pod="openstack/dnsmasq-dns-675f4bcbfc-nn5zn" Dec 10 13:11:36 crc kubenswrapper[4921]: I1210 13:11:36.662723 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vd7s5\" (UniqueName: \"kubernetes.io/projected/6d77f2dd-23bb-44c4-9f82-ef2587a0d872-kube-api-access-vd7s5\") pod \"dnsmasq-dns-78dd6ddcc-rswl8\" (UID: \"6d77f2dd-23bb-44c4-9f82-ef2587a0d872\") " pod="openstack/dnsmasq-dns-78dd6ddcc-rswl8" Dec 10 13:11:36 crc kubenswrapper[4921]: I1210 13:11:36.662829 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kft8n\" (UniqueName: \"kubernetes.io/projected/2fe91281-0f3f-4e50-9701-5d237060f645-kube-api-access-kft8n\") pod \"dnsmasq-dns-675f4bcbfc-nn5zn\" (UID: \"2fe91281-0f3f-4e50-9701-5d237060f645\") " pod="openstack/dnsmasq-dns-675f4bcbfc-nn5zn" Dec 10 13:11:36 crc kubenswrapper[4921]: I1210 13:11:36.663776 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6d77f2dd-23bb-44c4-9f82-ef2587a0d872-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-rswl8\" (UID: \"6d77f2dd-23bb-44c4-9f82-ef2587a0d872\") " pod="openstack/dnsmasq-dns-78dd6ddcc-rswl8" Dec 10 13:11:36 crc kubenswrapper[4921]: I1210 13:11:36.663697 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2fe91281-0f3f-4e50-9701-5d237060f645-config\") pod \"dnsmasq-dns-675f4bcbfc-nn5zn\" (UID: \"2fe91281-0f3f-4e50-9701-5d237060f645\") " pod="openstack/dnsmasq-dns-675f4bcbfc-nn5zn" Dec 10 13:11:36 crc kubenswrapper[4921]: I1210 13:11:36.672268 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-rswl8"] Dec 10 13:11:36 crc kubenswrapper[4921]: I1210 13:11:36.699173 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kft8n\" (UniqueName: \"kubernetes.io/projected/2fe91281-0f3f-4e50-9701-5d237060f645-kube-api-access-kft8n\") pod \"dnsmasq-dns-675f4bcbfc-nn5zn\" (UID: \"2fe91281-0f3f-4e50-9701-5d237060f645\") " pod="openstack/dnsmasq-dns-675f4bcbfc-nn5zn" Dec 10 13:11:36 crc kubenswrapper[4921]: I1210 13:11:36.765474 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6d77f2dd-23bb-44c4-9f82-ef2587a0d872-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-rswl8\" (UID: 
\"6d77f2dd-23bb-44c4-9f82-ef2587a0d872\") " pod="openstack/dnsmasq-dns-78dd6ddcc-rswl8" Dec 10 13:11:36 crc kubenswrapper[4921]: I1210 13:11:36.765529 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6d77f2dd-23bb-44c4-9f82-ef2587a0d872-config\") pod \"dnsmasq-dns-78dd6ddcc-rswl8\" (UID: \"6d77f2dd-23bb-44c4-9f82-ef2587a0d872\") " pod="openstack/dnsmasq-dns-78dd6ddcc-rswl8" Dec 10 13:11:36 crc kubenswrapper[4921]: I1210 13:11:36.765585 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vd7s5\" (UniqueName: \"kubernetes.io/projected/6d77f2dd-23bb-44c4-9f82-ef2587a0d872-kube-api-access-vd7s5\") pod \"dnsmasq-dns-78dd6ddcc-rswl8\" (UID: \"6d77f2dd-23bb-44c4-9f82-ef2587a0d872\") " pod="openstack/dnsmasq-dns-78dd6ddcc-rswl8" Dec 10 13:11:36 crc kubenswrapper[4921]: I1210 13:11:36.766279 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6d77f2dd-23bb-44c4-9f82-ef2587a0d872-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-rswl8\" (UID: \"6d77f2dd-23bb-44c4-9f82-ef2587a0d872\") " pod="openstack/dnsmasq-dns-78dd6ddcc-rswl8" Dec 10 13:11:36 crc kubenswrapper[4921]: I1210 13:11:36.766350 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6d77f2dd-23bb-44c4-9f82-ef2587a0d872-config\") pod \"dnsmasq-dns-78dd6ddcc-rswl8\" (UID: \"6d77f2dd-23bb-44c4-9f82-ef2587a0d872\") " pod="openstack/dnsmasq-dns-78dd6ddcc-rswl8" Dec 10 13:11:36 crc kubenswrapper[4921]: I1210 13:11:36.787716 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-nn5zn" Dec 10 13:11:36 crc kubenswrapper[4921]: I1210 13:11:36.800329 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vd7s5\" (UniqueName: \"kubernetes.io/projected/6d77f2dd-23bb-44c4-9f82-ef2587a0d872-kube-api-access-vd7s5\") pod \"dnsmasq-dns-78dd6ddcc-rswl8\" (UID: \"6d77f2dd-23bb-44c4-9f82-ef2587a0d872\") " pod="openstack/dnsmasq-dns-78dd6ddcc-rswl8" Dec 10 13:11:36 crc kubenswrapper[4921]: I1210 13:11:36.804677 4921 generic.go:334] "Generic (PLEG): container finished" podID="3038fce5-a66d-441a-b4af-00e5afb26384" containerID="30f010e0c32781f1d4f3e9d3e0c6705fa988eb8d24678482b4f9ab9cfcbe3241" exitCode=0 Dec 10 13:11:36 crc kubenswrapper[4921]: I1210 13:11:36.804718 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-82dmq" event={"ID":"3038fce5-a66d-441a-b4af-00e5afb26384","Type":"ContainerDied","Data":"30f010e0c32781f1d4f3e9d3e0c6705fa988eb8d24678482b4f9ab9cfcbe3241"} Dec 10 13:11:36 crc kubenswrapper[4921]: I1210 13:11:36.804743 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-82dmq" event={"ID":"3038fce5-a66d-441a-b4af-00e5afb26384","Type":"ContainerStarted","Data":"f6b6c553c96dc2750eee8fdd26cdd46c0d4b1e6e8a1f1fc76dc470b98169a2eb"} Dec 10 13:11:36 crc kubenswrapper[4921]: I1210 13:11:36.965113 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-rswl8" Dec 10 13:11:37 crc kubenswrapper[4921]: I1210 13:11:37.233640 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-rswl8"] Dec 10 13:11:37 crc kubenswrapper[4921]: W1210 13:11:37.237518 4921 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6d77f2dd_23bb_44c4_9f82_ef2587a0d872.slice/crio-f1ae8a1ffb77ac7dd040b7d439a958eec77baab8280d65de06a721194d1d8d3d WatchSource:0}: Error finding container f1ae8a1ffb77ac7dd040b7d439a958eec77baab8280d65de06a721194d1d8d3d: Status 404 returned error can't find the container with id f1ae8a1ffb77ac7dd040b7d439a958eec77baab8280d65de06a721194d1d8d3d Dec 10 13:11:37 crc kubenswrapper[4921]: I1210 13:11:37.289270 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-nn5zn"] Dec 10 13:11:37 crc kubenswrapper[4921]: I1210 13:11:37.816509 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-82dmq" event={"ID":"3038fce5-a66d-441a-b4af-00e5afb26384","Type":"ContainerStarted","Data":"bdee4a0444c164ee43f2958715878e803bf25b9dd10b96af4fffa826d5742e9d"} Dec 10 13:11:37 crc kubenswrapper[4921]: I1210 13:11:37.820958 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-rswl8" event={"ID":"6d77f2dd-23bb-44c4-9f82-ef2587a0d872","Type":"ContainerStarted","Data":"f1ae8a1ffb77ac7dd040b7d439a958eec77baab8280d65de06a721194d1d8d3d"} Dec 10 13:11:37 crc kubenswrapper[4921]: I1210 13:11:37.821984 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-nn5zn" event={"ID":"2fe91281-0f3f-4e50-9701-5d237060f645","Type":"ContainerStarted","Data":"1801e11e04aecd9496637a20446ccbe107090ad375b455da8ee8f1bb8ffa854f"} Dec 10 13:11:38 crc kubenswrapper[4921]: I1210 13:11:38.925887 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-nn5zn"] Dec 10 13:11:38 crc kubenswrapper[4921]: I1210 13:11:38.936331 4921 generic.go:334] "Generic (PLEG): container finished" podID="3038fce5-a66d-441a-b4af-00e5afb26384" containerID="bdee4a0444c164ee43f2958715878e803bf25b9dd10b96af4fffa826d5742e9d" exitCode=0 Dec 10 13:11:38 crc kubenswrapper[4921]: I1210 13:11:38.936370 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-82dmq" event={"ID":"3038fce5-a66d-441a-b4af-00e5afb26384","Type":"ContainerDied","Data":"bdee4a0444c164ee43f2958715878e803bf25b9dd10b96af4fffa826d5742e9d"} Dec 10 13:11:38 crc kubenswrapper[4921]: I1210 13:11:38.990680 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5ccc8479f9-pfmfg"] Dec 10 13:11:38 crc kubenswrapper[4921]: I1210 13:11:38.999670 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5ccc8479f9-pfmfg" Dec 10 13:11:39 crc kubenswrapper[4921]: I1210 13:11:39.025778 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5ccc8479f9-pfmfg"] Dec 10 13:11:39 crc kubenswrapper[4921]: I1210 13:11:39.039542 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/29ac1ff7-0ecb-4414-be96-ee937ba5bbb7-dns-svc\") pod \"dnsmasq-dns-5ccc8479f9-pfmfg\" (UID: \"29ac1ff7-0ecb-4414-be96-ee937ba5bbb7\") " pod="openstack/dnsmasq-dns-5ccc8479f9-pfmfg" Dec 10 13:11:39 crc kubenswrapper[4921]: I1210 13:11:39.039592 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nkvrx\" (UniqueName: \"kubernetes.io/projected/29ac1ff7-0ecb-4414-be96-ee937ba5bbb7-kube-api-access-nkvrx\") pod \"dnsmasq-dns-5ccc8479f9-pfmfg\" (UID: \"29ac1ff7-0ecb-4414-be96-ee937ba5bbb7\") " pod="openstack/dnsmasq-dns-5ccc8479f9-pfmfg" Dec 10 13:11:39 crc kubenswrapper[4921]: I1210 13:11:39.039683 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/29ac1ff7-0ecb-4414-be96-ee937ba5bbb7-config\") pod \"dnsmasq-dns-5ccc8479f9-pfmfg\" (UID: \"29ac1ff7-0ecb-4414-be96-ee937ba5bbb7\") " pod="openstack/dnsmasq-dns-5ccc8479f9-pfmfg" Dec 10 13:11:39 crc kubenswrapper[4921]: I1210 13:11:39.143427 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/29ac1ff7-0ecb-4414-be96-ee937ba5bbb7-config\") pod \"dnsmasq-dns-5ccc8479f9-pfmfg\" (UID: \"29ac1ff7-0ecb-4414-be96-ee937ba5bbb7\") " pod="openstack/dnsmasq-dns-5ccc8479f9-pfmfg" Dec 10 13:11:39 crc kubenswrapper[4921]: I1210 13:11:39.144113 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/29ac1ff7-0ecb-4414-be96-ee937ba5bbb7-dns-svc\") pod \"dnsmasq-dns-5ccc8479f9-pfmfg\" (UID: \"29ac1ff7-0ecb-4414-be96-ee937ba5bbb7\") " pod="openstack/dnsmasq-dns-5ccc8479f9-pfmfg" Dec 10 13:11:39 crc kubenswrapper[4921]: I1210 13:11:39.144204 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nkvrx\" (UniqueName: \"kubernetes.io/projected/29ac1ff7-0ecb-4414-be96-ee937ba5bbb7-kube-api-access-nkvrx\") pod \"dnsmasq-dns-5ccc8479f9-pfmfg\" (UID: \"29ac1ff7-0ecb-4414-be96-ee937ba5bbb7\") " pod="openstack/dnsmasq-dns-5ccc8479f9-pfmfg" Dec 10 13:11:39 crc kubenswrapper[4921]: I1210 13:11:39.145296 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/29ac1ff7-0ecb-4414-be96-ee937ba5bbb7-config\") pod \"dnsmasq-dns-5ccc8479f9-pfmfg\" (UID: \"29ac1ff7-0ecb-4414-be96-ee937ba5bbb7\") " pod="openstack/dnsmasq-dns-5ccc8479f9-pfmfg" Dec 10 13:11:39 crc kubenswrapper[4921]: I1210 13:11:39.146437 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/29ac1ff7-0ecb-4414-be96-ee937ba5bbb7-dns-svc\") pod \"dnsmasq-dns-5ccc8479f9-pfmfg\" (UID: \"29ac1ff7-0ecb-4414-be96-ee937ba5bbb7\") " pod="openstack/dnsmasq-dns-5ccc8479f9-pfmfg" Dec 10 13:11:39 crc kubenswrapper[4921]: I1210 13:11:39.181829 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nkvrx\" (UniqueName: 
\"kubernetes.io/projected/29ac1ff7-0ecb-4414-be96-ee937ba5bbb7-kube-api-access-nkvrx\") pod \"dnsmasq-dns-5ccc8479f9-pfmfg\" (UID: \"29ac1ff7-0ecb-4414-be96-ee937ba5bbb7\") " pod="openstack/dnsmasq-dns-5ccc8479f9-pfmfg" Dec 10 13:11:39 crc kubenswrapper[4921]: I1210 13:11:39.333922 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5ccc8479f9-pfmfg" Dec 10 13:11:39 crc kubenswrapper[4921]: I1210 13:11:39.679633 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-rswl8"] Dec 10 13:11:39 crc kubenswrapper[4921]: I1210 13:11:39.812633 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-42vg9"] Dec 10 13:11:39 crc kubenswrapper[4921]: I1210 13:11:39.814912 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-42vg9" Dec 10 13:11:39 crc kubenswrapper[4921]: I1210 13:11:39.846653 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-42vg9"] Dec 10 13:11:39 crc kubenswrapper[4921]: I1210 13:11:39.881541 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pd2pj\" (UniqueName: \"kubernetes.io/projected/cff820e5-df50-4ec5-bd20-4320a33badf1-kube-api-access-pd2pj\") pod \"dnsmasq-dns-57d769cc4f-42vg9\" (UID: \"cff820e5-df50-4ec5-bd20-4320a33badf1\") " pod="openstack/dnsmasq-dns-57d769cc4f-42vg9" Dec 10 13:11:39 crc kubenswrapper[4921]: I1210 13:11:39.881599 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cff820e5-df50-4ec5-bd20-4320a33badf1-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-42vg9\" (UID: \"cff820e5-df50-4ec5-bd20-4320a33badf1\") " pod="openstack/dnsmasq-dns-57d769cc4f-42vg9" Dec 10 13:11:39 crc kubenswrapper[4921]: I1210 13:11:39.881679 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cff820e5-df50-4ec5-bd20-4320a33badf1-config\") pod \"dnsmasq-dns-57d769cc4f-42vg9\" (UID: \"cff820e5-df50-4ec5-bd20-4320a33badf1\") " pod="openstack/dnsmasq-dns-57d769cc4f-42vg9" Dec 10 13:11:39 crc kubenswrapper[4921]: I1210 13:11:39.985223 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cff820e5-df50-4ec5-bd20-4320a33badf1-config\") pod \"dnsmasq-dns-57d769cc4f-42vg9\" (UID: \"cff820e5-df50-4ec5-bd20-4320a33badf1\") " pod="openstack/dnsmasq-dns-57d769cc4f-42vg9" Dec 10 13:11:39 crc kubenswrapper[4921]: I1210 13:11:39.985320 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pd2pj\" (UniqueName: \"kubernetes.io/projected/cff820e5-df50-4ec5-bd20-4320a33badf1-kube-api-access-pd2pj\") pod \"dnsmasq-dns-57d769cc4f-42vg9\" (UID: \"cff820e5-df50-4ec5-bd20-4320a33badf1\") " pod="openstack/dnsmasq-dns-57d769cc4f-42vg9" Dec 10 13:11:39 crc kubenswrapper[4921]: I1210 13:11:39.985352 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cff820e5-df50-4ec5-bd20-4320a33badf1-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-42vg9\" (UID: \"cff820e5-df50-4ec5-bd20-4320a33badf1\") " pod="openstack/dnsmasq-dns-57d769cc4f-42vg9" Dec 10 13:11:39 crc kubenswrapper[4921]: I1210 13:11:39.986334 4921 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cff820e5-df50-4ec5-bd20-4320a33badf1-config\") pod \"dnsmasq-dns-57d769cc4f-42vg9\" (UID: \"cff820e5-df50-4ec5-bd20-4320a33badf1\") " pod="openstack/dnsmasq-dns-57d769cc4f-42vg9" Dec 10 13:11:39 crc kubenswrapper[4921]: I1210 13:11:39.986450 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cff820e5-df50-4ec5-bd20-4320a33badf1-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-42vg9\" (UID: \"cff820e5-df50-4ec5-bd20-4320a33badf1\") " pod="openstack/dnsmasq-dns-57d769cc4f-42vg9" Dec 10 13:11:40 crc kubenswrapper[4921]: I1210 13:11:40.020307 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pd2pj\" (UniqueName: \"kubernetes.io/projected/cff820e5-df50-4ec5-bd20-4320a33badf1-kube-api-access-pd2pj\") pod \"dnsmasq-dns-57d769cc4f-42vg9\" (UID: \"cff820e5-df50-4ec5-bd20-4320a33badf1\") " pod="openstack/dnsmasq-dns-57d769cc4f-42vg9" Dec 10 13:11:40 crc kubenswrapper[4921]: I1210 13:11:40.023284 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5ccc8479f9-pfmfg"] Dec 10 13:11:40 crc kubenswrapper[4921]: W1210 13:11:40.096895 4921 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod29ac1ff7_0ecb_4414_be96_ee937ba5bbb7.slice/crio-40e85c94a1efff0842cfcf74e37cf86498edf599a7b36a6eef3c885d99465dac WatchSource:0}: Error finding container 40e85c94a1efff0842cfcf74e37cf86498edf599a7b36a6eef3c885d99465dac: Status 404 returned error can't find the container with id 40e85c94a1efff0842cfcf74e37cf86498edf599a7b36a6eef3c885d99465dac Dec 10 13:11:40 crc kubenswrapper[4921]: I1210 13:11:40.141805 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-9ww65" Dec 10 13:11:40 crc kubenswrapper[4921]: I1210 13:11:40.142541 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-9ww65" Dec 10 13:11:40 crc kubenswrapper[4921]: I1210 13:11:40.161142 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-42vg9" Dec 10 13:11:40 crc kubenswrapper[4921]: I1210 13:11:40.164591 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 10 13:11:40 crc kubenswrapper[4921]: I1210 13:11:40.165955 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 10 13:11:40 crc kubenswrapper[4921]: I1210 13:11:40.171984 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Dec 10 13:11:40 crc kubenswrapper[4921]: I1210 13:11:40.172461 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Dec 10 13:11:40 crc kubenswrapper[4921]: I1210 13:11:40.172600 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Dec 10 13:11:40 crc kubenswrapper[4921]: I1210 13:11:40.172722 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Dec 10 13:11:40 crc kubenswrapper[4921]: I1210 13:11:40.172844 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Dec 10 13:11:40 crc kubenswrapper[4921]: I1210 13:11:40.172984 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-fzqbd" Dec 10 13:11:40 crc kubenswrapper[4921]: I1210 13:11:40.173147 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Dec 10 13:11:40 crc kubenswrapper[4921]: I1210 13:11:40.181433 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 10 13:11:40 crc kubenswrapper[4921]: I1210 13:11:40.230421 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-9ww65" Dec 10 13:11:40 crc kubenswrapper[4921]: I1210 13:11:40.290540 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e098cd5a-992f-42a0-a89e-d8dd59dbbcc5-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"e098cd5a-992f-42a0-a89e-d8dd59dbbcc5\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 13:11:40 crc kubenswrapper[4921]: I1210 13:11:40.290590 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/e098cd5a-992f-42a0-a89e-d8dd59dbbcc5-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"e098cd5a-992f-42a0-a89e-d8dd59dbbcc5\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 13:11:40 crc kubenswrapper[4921]: I1210 13:11:40.290622 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/e098cd5a-992f-42a0-a89e-d8dd59dbbcc5-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"e098cd5a-992f-42a0-a89e-d8dd59dbbcc5\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 13:11:40 crc kubenswrapper[4921]: I1210 13:11:40.290663 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"e098cd5a-992f-42a0-a89e-d8dd59dbbcc5\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 13:11:40 crc kubenswrapper[4921]: I1210 13:11:40.290684 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/e098cd5a-992f-42a0-a89e-d8dd59dbbcc5-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: 
\"e098cd5a-992f-42a0-a89e-d8dd59dbbcc5\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 13:11:40 crc kubenswrapper[4921]: I1210 13:11:40.290727 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/e098cd5a-992f-42a0-a89e-d8dd59dbbcc5-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"e098cd5a-992f-42a0-a89e-d8dd59dbbcc5\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 13:11:40 crc kubenswrapper[4921]: I1210 13:11:40.292485 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/e098cd5a-992f-42a0-a89e-d8dd59dbbcc5-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"e098cd5a-992f-42a0-a89e-d8dd59dbbcc5\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 13:11:40 crc kubenswrapper[4921]: I1210 13:11:40.292563 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/e098cd5a-992f-42a0-a89e-d8dd59dbbcc5-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"e098cd5a-992f-42a0-a89e-d8dd59dbbcc5\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 13:11:40 crc kubenswrapper[4921]: I1210 13:11:40.292585 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zk5nq\" (UniqueName: \"kubernetes.io/projected/e098cd5a-992f-42a0-a89e-d8dd59dbbcc5-kube-api-access-zk5nq\") pod \"rabbitmq-cell1-server-0\" (UID: \"e098cd5a-992f-42a0-a89e-d8dd59dbbcc5\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 13:11:40 crc kubenswrapper[4921]: I1210 13:11:40.292652 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/e098cd5a-992f-42a0-a89e-d8dd59dbbcc5-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"e098cd5a-992f-42a0-a89e-d8dd59dbbcc5\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 13:11:40 crc kubenswrapper[4921]: I1210 13:11:40.292672 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/e098cd5a-992f-42a0-a89e-d8dd59dbbcc5-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"e098cd5a-992f-42a0-a89e-d8dd59dbbcc5\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 13:11:40 crc kubenswrapper[4921]: I1210 13:11:40.399648 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/e098cd5a-992f-42a0-a89e-d8dd59dbbcc5-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"e098cd5a-992f-42a0-a89e-d8dd59dbbcc5\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 13:11:40 crc kubenswrapper[4921]: I1210 13:11:40.400006 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/e098cd5a-992f-42a0-a89e-d8dd59dbbcc5-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"e098cd5a-992f-42a0-a89e-d8dd59dbbcc5\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 13:11:40 crc kubenswrapper[4921]: I1210 13:11:40.401365 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/e098cd5a-992f-42a0-a89e-d8dd59dbbcc5-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: 
\"e098cd5a-992f-42a0-a89e-d8dd59dbbcc5\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 13:11:40 crc kubenswrapper[4921]: I1210 13:11:40.401426 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zk5nq\" (UniqueName: \"kubernetes.io/projected/e098cd5a-992f-42a0-a89e-d8dd59dbbcc5-kube-api-access-zk5nq\") pod \"rabbitmq-cell1-server-0\" (UID: \"e098cd5a-992f-42a0-a89e-d8dd59dbbcc5\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 13:11:40 crc kubenswrapper[4921]: I1210 13:11:40.401486 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/e098cd5a-992f-42a0-a89e-d8dd59dbbcc5-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"e098cd5a-992f-42a0-a89e-d8dd59dbbcc5\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 13:11:40 crc kubenswrapper[4921]: I1210 13:11:40.401502 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/e098cd5a-992f-42a0-a89e-d8dd59dbbcc5-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"e098cd5a-992f-42a0-a89e-d8dd59dbbcc5\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 13:11:40 crc kubenswrapper[4921]: I1210 13:11:40.401543 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/e098cd5a-992f-42a0-a89e-d8dd59dbbcc5-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"e098cd5a-992f-42a0-a89e-d8dd59dbbcc5\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 13:11:40 crc kubenswrapper[4921]: I1210 13:11:40.401598 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e098cd5a-992f-42a0-a89e-d8dd59dbbcc5-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"e098cd5a-992f-42a0-a89e-d8dd59dbbcc5\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 13:11:40 crc kubenswrapper[4921]: I1210 13:11:40.401637 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/e098cd5a-992f-42a0-a89e-d8dd59dbbcc5-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"e098cd5a-992f-42a0-a89e-d8dd59dbbcc5\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 13:11:40 crc kubenswrapper[4921]: I1210 13:11:40.401675 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/e098cd5a-992f-42a0-a89e-d8dd59dbbcc5-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"e098cd5a-992f-42a0-a89e-d8dd59dbbcc5\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 13:11:40 crc kubenswrapper[4921]: I1210 13:11:40.401732 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"e098cd5a-992f-42a0-a89e-d8dd59dbbcc5\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 13:11:40 crc kubenswrapper[4921]: I1210 13:11:40.401761 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/e098cd5a-992f-42a0-a89e-d8dd59dbbcc5-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"e098cd5a-992f-42a0-a89e-d8dd59dbbcc5\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 13:11:40 crc kubenswrapper[4921]: I1210 13:11:40.402843 4921 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e098cd5a-992f-42a0-a89e-d8dd59dbbcc5-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"e098cd5a-992f-42a0-a89e-d8dd59dbbcc5\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 13:11:40 crc kubenswrapper[4921]: I1210 13:11:40.403101 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/e098cd5a-992f-42a0-a89e-d8dd59dbbcc5-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"e098cd5a-992f-42a0-a89e-d8dd59dbbcc5\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 13:11:40 crc kubenswrapper[4921]: I1210 13:11:40.404104 4921 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"e098cd5a-992f-42a0-a89e-d8dd59dbbcc5\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/rabbitmq-cell1-server-0" Dec 10 13:11:40 crc kubenswrapper[4921]: I1210 13:11:40.418788 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/e098cd5a-992f-42a0-a89e-d8dd59dbbcc5-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"e098cd5a-992f-42a0-a89e-d8dd59dbbcc5\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 13:11:40 crc kubenswrapper[4921]: I1210 13:11:40.422275 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/e098cd5a-992f-42a0-a89e-d8dd59dbbcc5-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"e098cd5a-992f-42a0-a89e-d8dd59dbbcc5\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 13:11:40 crc kubenswrapper[4921]: I1210 13:11:40.422859 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/e098cd5a-992f-42a0-a89e-d8dd59dbbcc5-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"e098cd5a-992f-42a0-a89e-d8dd59dbbcc5\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 13:11:40 crc kubenswrapper[4921]: I1210 13:11:40.422976 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/e098cd5a-992f-42a0-a89e-d8dd59dbbcc5-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"e098cd5a-992f-42a0-a89e-d8dd59dbbcc5\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 13:11:40 crc kubenswrapper[4921]: I1210 13:11:40.427012 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/e098cd5a-992f-42a0-a89e-d8dd59dbbcc5-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"e098cd5a-992f-42a0-a89e-d8dd59dbbcc5\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 13:11:40 crc kubenswrapper[4921]: I1210 13:11:40.427568 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/e098cd5a-992f-42a0-a89e-d8dd59dbbcc5-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"e098cd5a-992f-42a0-a89e-d8dd59dbbcc5\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 13:11:40 crc kubenswrapper[4921]: I1210 13:11:40.432932 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zk5nq\" (UniqueName: 
\"kubernetes.io/projected/e098cd5a-992f-42a0-a89e-d8dd59dbbcc5-kube-api-access-zk5nq\") pod \"rabbitmq-cell1-server-0\" (UID: \"e098cd5a-992f-42a0-a89e-d8dd59dbbcc5\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 13:11:40 crc kubenswrapper[4921]: I1210 13:11:40.483905 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"e098cd5a-992f-42a0-a89e-d8dd59dbbcc5\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 13:11:40 crc kubenswrapper[4921]: I1210 13:11:40.585328 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 10 13:11:40 crc kubenswrapper[4921]: I1210 13:11:40.931054 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Dec 10 13:11:40 crc kubenswrapper[4921]: I1210 13:11:40.934693 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 10 13:11:40 crc kubenswrapper[4921]: I1210 13:11:40.940653 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-2k2nw" Dec 10 13:11:40 crc kubenswrapper[4921]: I1210 13:11:40.947494 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-42vg9"] Dec 10 13:11:40 crc kubenswrapper[4921]: I1210 13:11:40.948422 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Dec 10 13:11:40 crc kubenswrapper[4921]: I1210 13:11:40.948573 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Dec 10 13:11:40 crc kubenswrapper[4921]: I1210 13:11:40.948526 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Dec 10 13:11:40 crc kubenswrapper[4921]: I1210 13:11:40.948733 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Dec 10 13:11:40 crc kubenswrapper[4921]: I1210 13:11:40.948758 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Dec 10 13:11:40 crc kubenswrapper[4921]: I1210 13:11:40.948784 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Dec 10 13:11:40 crc kubenswrapper[4921]: I1210 13:11:40.961708 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 10 13:11:40 crc kubenswrapper[4921]: I1210 13:11:40.986724 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-82dmq" event={"ID":"3038fce5-a66d-441a-b4af-00e5afb26384","Type":"ContainerStarted","Data":"ae9f3da94b22f54f831da4db16a59c2bdcd8e97421f73e78041cfb48aef3afe7"} Dec 10 13:11:41 crc kubenswrapper[4921]: I1210 13:11:41.013064 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5ccc8479f9-pfmfg" event={"ID":"29ac1ff7-0ecb-4414-be96-ee937ba5bbb7","Type":"ContainerStarted","Data":"40e85c94a1efff0842cfcf74e37cf86498edf599a7b36a6eef3c885d99465dac"} Dec 10 13:11:41 crc kubenswrapper[4921]: I1210 13:11:41.018305 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-82dmq" podStartSLOduration=3.370768566 podStartE2EDuration="6.01829071s" podCreationTimestamp="2025-12-10 13:11:35 +0000 UTC" firstStartedPulling="2025-12-10 13:11:36.806464739 
+0000 UTC m=+894.022686663" lastFinishedPulling="2025-12-10 13:11:39.453986883 +0000 UTC m=+896.670208807" observedRunningTime="2025-12-10 13:11:41.012060673 +0000 UTC m=+898.228282597" watchObservedRunningTime="2025-12-10 13:11:41.01829071 +0000 UTC m=+898.234512634" Dec 10 13:11:41 crc kubenswrapper[4921]: I1210 13:11:41.018962 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jvw6t\" (UniqueName: \"kubernetes.io/projected/c651083f-4dd3-4963-892f-ddbc5ef1af05-kube-api-access-jvw6t\") pod \"rabbitmq-server-0\" (UID: \"c651083f-4dd3-4963-892f-ddbc5ef1af05\") " pod="openstack/rabbitmq-server-0" Dec 10 13:11:41 crc kubenswrapper[4921]: I1210 13:11:41.019015 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/c651083f-4dd3-4963-892f-ddbc5ef1af05-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"c651083f-4dd3-4963-892f-ddbc5ef1af05\") " pod="openstack/rabbitmq-server-0" Dec 10 13:11:41 crc kubenswrapper[4921]: I1210 13:11:41.019037 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c651083f-4dd3-4963-892f-ddbc5ef1af05-server-conf\") pod \"rabbitmq-server-0\" (UID: \"c651083f-4dd3-4963-892f-ddbc5ef1af05\") " pod="openstack/rabbitmq-server-0" Dec 10 13:11:41 crc kubenswrapper[4921]: I1210 13:11:41.019108 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-server-0\" (UID: \"c651083f-4dd3-4963-892f-ddbc5ef1af05\") " pod="openstack/rabbitmq-server-0" Dec 10 13:11:41 crc kubenswrapper[4921]: I1210 13:11:41.019173 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c651083f-4dd3-4963-892f-ddbc5ef1af05-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"c651083f-4dd3-4963-892f-ddbc5ef1af05\") " pod="openstack/rabbitmq-server-0" Dec 10 13:11:41 crc kubenswrapper[4921]: I1210 13:11:41.019213 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c651083f-4dd3-4963-892f-ddbc5ef1af05-config-data\") pod \"rabbitmq-server-0\" (UID: \"c651083f-4dd3-4963-892f-ddbc5ef1af05\") " pod="openstack/rabbitmq-server-0" Dec 10 13:11:41 crc kubenswrapper[4921]: I1210 13:11:41.019269 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c651083f-4dd3-4963-892f-ddbc5ef1af05-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"c651083f-4dd3-4963-892f-ddbc5ef1af05\") " pod="openstack/rabbitmq-server-0" Dec 10 13:11:41 crc kubenswrapper[4921]: I1210 13:11:41.019284 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c651083f-4dd3-4963-892f-ddbc5ef1af05-pod-info\") pod \"rabbitmq-server-0\" (UID: \"c651083f-4dd3-4963-892f-ddbc5ef1af05\") " pod="openstack/rabbitmq-server-0" Dec 10 13:11:41 crc kubenswrapper[4921]: I1210 13:11:41.019300 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: 
\"kubernetes.io/projected/c651083f-4dd3-4963-892f-ddbc5ef1af05-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"c651083f-4dd3-4963-892f-ddbc5ef1af05\") " pod="openstack/rabbitmq-server-0" Dec 10 13:11:41 crc kubenswrapper[4921]: I1210 13:11:41.019332 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c651083f-4dd3-4963-892f-ddbc5ef1af05-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"c651083f-4dd3-4963-892f-ddbc5ef1af05\") " pod="openstack/rabbitmq-server-0" Dec 10 13:11:41 crc kubenswrapper[4921]: I1210 13:11:41.019370 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c651083f-4dd3-4963-892f-ddbc5ef1af05-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"c651083f-4dd3-4963-892f-ddbc5ef1af05\") " pod="openstack/rabbitmq-server-0" Dec 10 13:11:41 crc kubenswrapper[4921]: I1210 13:11:41.119775 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-9ww65" Dec 10 13:11:41 crc kubenswrapper[4921]: I1210 13:11:41.121142 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jvw6t\" (UniqueName: \"kubernetes.io/projected/c651083f-4dd3-4963-892f-ddbc5ef1af05-kube-api-access-jvw6t\") pod \"rabbitmq-server-0\" (UID: \"c651083f-4dd3-4963-892f-ddbc5ef1af05\") " pod="openstack/rabbitmq-server-0" Dec 10 13:11:41 crc kubenswrapper[4921]: I1210 13:11:41.121181 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/c651083f-4dd3-4963-892f-ddbc5ef1af05-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"c651083f-4dd3-4963-892f-ddbc5ef1af05\") " pod="openstack/rabbitmq-server-0" Dec 10 13:11:41 crc kubenswrapper[4921]: I1210 13:11:41.121215 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c651083f-4dd3-4963-892f-ddbc5ef1af05-server-conf\") pod \"rabbitmq-server-0\" (UID: \"c651083f-4dd3-4963-892f-ddbc5ef1af05\") " pod="openstack/rabbitmq-server-0" Dec 10 13:11:41 crc kubenswrapper[4921]: I1210 13:11:41.121248 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-server-0\" (UID: \"c651083f-4dd3-4963-892f-ddbc5ef1af05\") " pod="openstack/rabbitmq-server-0" Dec 10 13:11:41 crc kubenswrapper[4921]: I1210 13:11:41.121274 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c651083f-4dd3-4963-892f-ddbc5ef1af05-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"c651083f-4dd3-4963-892f-ddbc5ef1af05\") " pod="openstack/rabbitmq-server-0" Dec 10 13:11:41 crc kubenswrapper[4921]: I1210 13:11:41.121300 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c651083f-4dd3-4963-892f-ddbc5ef1af05-config-data\") pod \"rabbitmq-server-0\" (UID: \"c651083f-4dd3-4963-892f-ddbc5ef1af05\") " pod="openstack/rabbitmq-server-0" Dec 10 13:11:41 crc kubenswrapper[4921]: I1210 13:11:41.121334 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: 
\"kubernetes.io/projected/c651083f-4dd3-4963-892f-ddbc5ef1af05-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"c651083f-4dd3-4963-892f-ddbc5ef1af05\") " pod="openstack/rabbitmq-server-0" Dec 10 13:11:41 crc kubenswrapper[4921]: I1210 13:11:41.121352 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c651083f-4dd3-4963-892f-ddbc5ef1af05-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"c651083f-4dd3-4963-892f-ddbc5ef1af05\") " pod="openstack/rabbitmq-server-0" Dec 10 13:11:41 crc kubenswrapper[4921]: I1210 13:11:41.121369 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c651083f-4dd3-4963-892f-ddbc5ef1af05-pod-info\") pod \"rabbitmq-server-0\" (UID: \"c651083f-4dd3-4963-892f-ddbc5ef1af05\") " pod="openstack/rabbitmq-server-0" Dec 10 13:11:41 crc kubenswrapper[4921]: I1210 13:11:41.121409 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c651083f-4dd3-4963-892f-ddbc5ef1af05-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"c651083f-4dd3-4963-892f-ddbc5ef1af05\") " pod="openstack/rabbitmq-server-0" Dec 10 13:11:41 crc kubenswrapper[4921]: I1210 13:11:41.121434 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c651083f-4dd3-4963-892f-ddbc5ef1af05-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"c651083f-4dd3-4963-892f-ddbc5ef1af05\") " pod="openstack/rabbitmq-server-0" Dec 10 13:11:41 crc kubenswrapper[4921]: I1210 13:11:41.121808 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c651083f-4dd3-4963-892f-ddbc5ef1af05-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"c651083f-4dd3-4963-892f-ddbc5ef1af05\") " pod="openstack/rabbitmq-server-0" Dec 10 13:11:41 crc kubenswrapper[4921]: I1210 13:11:41.121943 4921 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-server-0\" (UID: \"c651083f-4dd3-4963-892f-ddbc5ef1af05\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/rabbitmq-server-0" Dec 10 13:11:41 crc kubenswrapper[4921]: I1210 13:11:41.122955 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c651083f-4dd3-4963-892f-ddbc5ef1af05-server-conf\") pod \"rabbitmq-server-0\" (UID: \"c651083f-4dd3-4963-892f-ddbc5ef1af05\") " pod="openstack/rabbitmq-server-0" Dec 10 13:11:41 crc kubenswrapper[4921]: I1210 13:11:41.124298 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c651083f-4dd3-4963-892f-ddbc5ef1af05-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"c651083f-4dd3-4963-892f-ddbc5ef1af05\") " pod="openstack/rabbitmq-server-0" Dec 10 13:11:41 crc kubenswrapper[4921]: I1210 13:11:41.124512 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c651083f-4dd3-4963-892f-ddbc5ef1af05-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"c651083f-4dd3-4963-892f-ddbc5ef1af05\") " pod="openstack/rabbitmq-server-0" Dec 10 13:11:41 crc kubenswrapper[4921]: I1210 
13:11:41.124932 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c651083f-4dd3-4963-892f-ddbc5ef1af05-config-data\") pod \"rabbitmq-server-0\" (UID: \"c651083f-4dd3-4963-892f-ddbc5ef1af05\") " pod="openstack/rabbitmq-server-0" Dec 10 13:11:41 crc kubenswrapper[4921]: I1210 13:11:41.129201 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c651083f-4dd3-4963-892f-ddbc5ef1af05-pod-info\") pod \"rabbitmq-server-0\" (UID: \"c651083f-4dd3-4963-892f-ddbc5ef1af05\") " pod="openstack/rabbitmq-server-0" Dec 10 13:11:41 crc kubenswrapper[4921]: I1210 13:11:41.135110 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/c651083f-4dd3-4963-892f-ddbc5ef1af05-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"c651083f-4dd3-4963-892f-ddbc5ef1af05\") " pod="openstack/rabbitmq-server-0" Dec 10 13:11:41 crc kubenswrapper[4921]: I1210 13:11:41.136858 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c651083f-4dd3-4963-892f-ddbc5ef1af05-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"c651083f-4dd3-4963-892f-ddbc5ef1af05\") " pod="openstack/rabbitmq-server-0" Dec 10 13:11:41 crc kubenswrapper[4921]: I1210 13:11:41.146633 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c651083f-4dd3-4963-892f-ddbc5ef1af05-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"c651083f-4dd3-4963-892f-ddbc5ef1af05\") " pod="openstack/rabbitmq-server-0" Dec 10 13:11:41 crc kubenswrapper[4921]: I1210 13:11:41.154939 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jvw6t\" (UniqueName: \"kubernetes.io/projected/c651083f-4dd3-4963-892f-ddbc5ef1af05-kube-api-access-jvw6t\") pod \"rabbitmq-server-0\" (UID: \"c651083f-4dd3-4963-892f-ddbc5ef1af05\") " pod="openstack/rabbitmq-server-0" Dec 10 13:11:41 crc kubenswrapper[4921]: I1210 13:11:41.223369 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-server-0\" (UID: \"c651083f-4dd3-4963-892f-ddbc5ef1af05\") " pod="openstack/rabbitmq-server-0" Dec 10 13:11:41 crc kubenswrapper[4921]: I1210 13:11:41.279166 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 10 13:11:41 crc kubenswrapper[4921]: I1210 13:11:41.304250 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 10 13:11:41 crc kubenswrapper[4921]: I1210 13:11:41.570628 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-9ww65"] Dec 10 13:11:42 crc kubenswrapper[4921]: I1210 13:11:42.036017 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"e098cd5a-992f-42a0-a89e-d8dd59dbbcc5","Type":"ContainerStarted","Data":"b70740192fffb66ab4383666a58c9f78e25c8f205c631e3260069c5c2bee2735"} Dec 10 13:11:42 crc kubenswrapper[4921]: I1210 13:11:42.043736 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-42vg9" event={"ID":"cff820e5-df50-4ec5-bd20-4320a33badf1","Type":"ContainerStarted","Data":"4ecbc90e9fc3ff6d3ebc0179852cfa1407130626c52f2019bfaf93def16cdf5f"} Dec 10 13:11:42 crc kubenswrapper[4921]: I1210 13:11:42.149023 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 10 13:11:42 crc kubenswrapper[4921]: I1210 13:11:42.315167 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"] Dec 10 13:11:42 crc kubenswrapper[4921]: I1210 13:11:42.316964 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Dec 10 13:11:42 crc kubenswrapper[4921]: I1210 13:11:42.324052 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data" Dec 10 13:11:42 crc kubenswrapper[4921]: I1210 13:11:42.324224 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-hc2p7" Dec 10 13:11:42 crc kubenswrapper[4921]: I1210 13:11:42.325995 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts" Dec 10 13:11:42 crc kubenswrapper[4921]: I1210 13:11:42.326142 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc" Dec 10 13:11:42 crc kubenswrapper[4921]: I1210 13:11:42.338516 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Dec 10 13:11:42 crc kubenswrapper[4921]: I1210 13:11:42.343422 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle" Dec 10 13:11:42 crc kubenswrapper[4921]: I1210 13:11:42.443256 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/65febf2e-fbb3-42a6-96e0-b7933c0911dd-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"65febf2e-fbb3-42a6-96e0-b7933c0911dd\") " pod="openstack/openstack-galera-0" Dec 10 13:11:42 crc kubenswrapper[4921]: I1210 13:11:42.443336 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/65febf2e-fbb3-42a6-96e0-b7933c0911dd-config-data-default\") pod \"openstack-galera-0\" (UID: \"65febf2e-fbb3-42a6-96e0-b7933c0911dd\") " pod="openstack/openstack-galera-0" Dec 10 13:11:42 crc kubenswrapper[4921]: I1210 13:11:42.443401 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod 
\"openstack-galera-0\" (UID: \"65febf2e-fbb3-42a6-96e0-b7933c0911dd\") " pod="openstack/openstack-galera-0" Dec 10 13:11:42 crc kubenswrapper[4921]: I1210 13:11:42.443422 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/65febf2e-fbb3-42a6-96e0-b7933c0911dd-kolla-config\") pod \"openstack-galera-0\" (UID: \"65febf2e-fbb3-42a6-96e0-b7933c0911dd\") " pod="openstack/openstack-galera-0" Dec 10 13:11:42 crc kubenswrapper[4921]: I1210 13:11:42.443437 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/65febf2e-fbb3-42a6-96e0-b7933c0911dd-operator-scripts\") pod \"openstack-galera-0\" (UID: \"65febf2e-fbb3-42a6-96e0-b7933c0911dd\") " pod="openstack/openstack-galera-0" Dec 10 13:11:42 crc kubenswrapper[4921]: I1210 13:11:42.443474 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65febf2e-fbb3-42a6-96e0-b7933c0911dd-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"65febf2e-fbb3-42a6-96e0-b7933c0911dd\") " pod="openstack/openstack-galera-0" Dec 10 13:11:42 crc kubenswrapper[4921]: I1210 13:11:42.443493 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/65febf2e-fbb3-42a6-96e0-b7933c0911dd-config-data-generated\") pod \"openstack-galera-0\" (UID: \"65febf2e-fbb3-42a6-96e0-b7933c0911dd\") " pod="openstack/openstack-galera-0" Dec 10 13:11:42 crc kubenswrapper[4921]: I1210 13:11:42.443521 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l7nrn\" (UniqueName: \"kubernetes.io/projected/65febf2e-fbb3-42a6-96e0-b7933c0911dd-kube-api-access-l7nrn\") pod \"openstack-galera-0\" (UID: \"65febf2e-fbb3-42a6-96e0-b7933c0911dd\") " pod="openstack/openstack-galera-0" Dec 10 13:11:42 crc kubenswrapper[4921]: I1210 13:11:42.545128 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"openstack-galera-0\" (UID: \"65febf2e-fbb3-42a6-96e0-b7933c0911dd\") " pod="openstack/openstack-galera-0" Dec 10 13:11:42 crc kubenswrapper[4921]: I1210 13:11:42.545169 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/65febf2e-fbb3-42a6-96e0-b7933c0911dd-kolla-config\") pod \"openstack-galera-0\" (UID: \"65febf2e-fbb3-42a6-96e0-b7933c0911dd\") " pod="openstack/openstack-galera-0" Dec 10 13:11:42 crc kubenswrapper[4921]: I1210 13:11:42.545190 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/65febf2e-fbb3-42a6-96e0-b7933c0911dd-operator-scripts\") pod \"openstack-galera-0\" (UID: \"65febf2e-fbb3-42a6-96e0-b7933c0911dd\") " pod="openstack/openstack-galera-0" Dec 10 13:11:42 crc kubenswrapper[4921]: I1210 13:11:42.545208 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65febf2e-fbb3-42a6-96e0-b7933c0911dd-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"65febf2e-fbb3-42a6-96e0-b7933c0911dd\") " pod="openstack/openstack-galera-0" Dec 10 13:11:42 
crc kubenswrapper[4921]: I1210 13:11:42.545227 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/65febf2e-fbb3-42a6-96e0-b7933c0911dd-config-data-generated\") pod \"openstack-galera-0\" (UID: \"65febf2e-fbb3-42a6-96e0-b7933c0911dd\") " pod="openstack/openstack-galera-0" Dec 10 13:11:42 crc kubenswrapper[4921]: I1210 13:11:42.545246 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l7nrn\" (UniqueName: \"kubernetes.io/projected/65febf2e-fbb3-42a6-96e0-b7933c0911dd-kube-api-access-l7nrn\") pod \"openstack-galera-0\" (UID: \"65febf2e-fbb3-42a6-96e0-b7933c0911dd\") " pod="openstack/openstack-galera-0" Dec 10 13:11:42 crc kubenswrapper[4921]: I1210 13:11:42.545302 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/65febf2e-fbb3-42a6-96e0-b7933c0911dd-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"65febf2e-fbb3-42a6-96e0-b7933c0911dd\") " pod="openstack/openstack-galera-0" Dec 10 13:11:42 crc kubenswrapper[4921]: I1210 13:11:42.545340 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/65febf2e-fbb3-42a6-96e0-b7933c0911dd-config-data-default\") pod \"openstack-galera-0\" (UID: \"65febf2e-fbb3-42a6-96e0-b7933c0911dd\") " pod="openstack/openstack-galera-0" Dec 10 13:11:42 crc kubenswrapper[4921]: I1210 13:11:42.546208 4921 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"openstack-galera-0\" (UID: \"65febf2e-fbb3-42a6-96e0-b7933c0911dd\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/openstack-galera-0" Dec 10 13:11:42 crc kubenswrapper[4921]: I1210 13:11:42.546273 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/65febf2e-fbb3-42a6-96e0-b7933c0911dd-config-data-default\") pod \"openstack-galera-0\" (UID: \"65febf2e-fbb3-42a6-96e0-b7933c0911dd\") " pod="openstack/openstack-galera-0" Dec 10 13:11:42 crc kubenswrapper[4921]: I1210 13:11:42.547122 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/65febf2e-fbb3-42a6-96e0-b7933c0911dd-config-data-generated\") pod \"openstack-galera-0\" (UID: \"65febf2e-fbb3-42a6-96e0-b7933c0911dd\") " pod="openstack/openstack-galera-0" Dec 10 13:11:42 crc kubenswrapper[4921]: I1210 13:11:42.547334 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/65febf2e-fbb3-42a6-96e0-b7933c0911dd-kolla-config\") pod \"openstack-galera-0\" (UID: \"65febf2e-fbb3-42a6-96e0-b7933c0911dd\") " pod="openstack/openstack-galera-0" Dec 10 13:11:42 crc kubenswrapper[4921]: I1210 13:11:42.548319 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/65febf2e-fbb3-42a6-96e0-b7933c0911dd-operator-scripts\") pod \"openstack-galera-0\" (UID: \"65febf2e-fbb3-42a6-96e0-b7933c0911dd\") " pod="openstack/openstack-galera-0" Dec 10 13:11:42 crc kubenswrapper[4921]: I1210 13:11:42.554844 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/65febf2e-fbb3-42a6-96e0-b7933c0911dd-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"65febf2e-fbb3-42a6-96e0-b7933c0911dd\") " pod="openstack/openstack-galera-0" Dec 10 13:11:42 crc kubenswrapper[4921]: I1210 13:11:42.576537 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/65febf2e-fbb3-42a6-96e0-b7933c0911dd-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"65febf2e-fbb3-42a6-96e0-b7933c0911dd\") " pod="openstack/openstack-galera-0" Dec 10 13:11:42 crc kubenswrapper[4921]: I1210 13:11:42.578461 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l7nrn\" (UniqueName: \"kubernetes.io/projected/65febf2e-fbb3-42a6-96e0-b7933c0911dd-kube-api-access-l7nrn\") pod \"openstack-galera-0\" (UID: \"65febf2e-fbb3-42a6-96e0-b7933c0911dd\") " pod="openstack/openstack-galera-0" Dec 10 13:11:42 crc kubenswrapper[4921]: I1210 13:11:42.594295 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"openstack-galera-0\" (UID: \"65febf2e-fbb3-42a6-96e0-b7933c0911dd\") " pod="openstack/openstack-galera-0" Dec 10 13:11:42 crc kubenswrapper[4921]: I1210 13:11:42.652705 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Dec 10 13:11:43 crc kubenswrapper[4921]: I1210 13:11:43.067877 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-9ww65" podUID="8feb8732-25ce-4b70-ad69-bb98d4075b4b" containerName="registry-server" containerID="cri-o://55af304d716429adcd32e6f0022d48100a66f29329bc5972b6d96ecb065d0eb9" gracePeriod=2 Dec 10 13:11:43 crc kubenswrapper[4921]: I1210 13:11:43.068514 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"c651083f-4dd3-4963-892f-ddbc5ef1af05","Type":"ContainerStarted","Data":"b98b85ea672a94d33b52ead314c1b3c811ba91dc059d7cef791b66117c491386"} Dec 10 13:11:43 crc kubenswrapper[4921]: I1210 13:11:43.361326 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Dec 10 13:11:43 crc kubenswrapper[4921]: I1210 13:11:43.698229 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 10 13:11:43 crc kubenswrapper[4921]: I1210 13:11:43.699406 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-cell1-galera-0" Dec 10 13:11:43 crc kubenswrapper[4921]: I1210 13:11:43.708691 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-bkwlv" Dec 10 13:11:43 crc kubenswrapper[4921]: I1210 13:11:43.708912 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts" Dec 10 13:11:43 crc kubenswrapper[4921]: I1210 13:11:43.709037 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data" Dec 10 13:11:43 crc kubenswrapper[4921]: I1210 13:11:43.709204 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc" Dec 10 13:11:43 crc kubenswrapper[4921]: I1210 13:11:43.738657 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 10 13:11:43 crc kubenswrapper[4921]: I1210 13:11:43.780660 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/18df86ca-4da7-4979-8a7d-1db5b93c42f4-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"18df86ca-4da7-4979-8a7d-1db5b93c42f4\") " pod="openstack/openstack-cell1-galera-0" Dec 10 13:11:43 crc kubenswrapper[4921]: I1210 13:11:43.780706 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"openstack-cell1-galera-0\" (UID: \"18df86ca-4da7-4979-8a7d-1db5b93c42f4\") " pod="openstack/openstack-cell1-galera-0" Dec 10 13:11:43 crc kubenswrapper[4921]: I1210 13:11:43.780740 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/18df86ca-4da7-4979-8a7d-1db5b93c42f4-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"18df86ca-4da7-4979-8a7d-1db5b93c42f4\") " pod="openstack/openstack-cell1-galera-0" Dec 10 13:11:43 crc kubenswrapper[4921]: I1210 13:11:43.780760 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/18df86ca-4da7-4979-8a7d-1db5b93c42f4-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"18df86ca-4da7-4979-8a7d-1db5b93c42f4\") " pod="openstack/openstack-cell1-galera-0" Dec 10 13:11:43 crc kubenswrapper[4921]: I1210 13:11:43.780799 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-727hp\" (UniqueName: \"kubernetes.io/projected/18df86ca-4da7-4979-8a7d-1db5b93c42f4-kube-api-access-727hp\") pod \"openstack-cell1-galera-0\" (UID: \"18df86ca-4da7-4979-8a7d-1db5b93c42f4\") " pod="openstack/openstack-cell1-galera-0" Dec 10 13:11:43 crc kubenswrapper[4921]: I1210 13:11:43.780816 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/18df86ca-4da7-4979-8a7d-1db5b93c42f4-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"18df86ca-4da7-4979-8a7d-1db5b93c42f4\") " pod="openstack/openstack-cell1-galera-0" Dec 10 13:11:43 crc kubenswrapper[4921]: I1210 13:11:43.780954 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: 
\"kubernetes.io/configmap/18df86ca-4da7-4979-8a7d-1db5b93c42f4-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"18df86ca-4da7-4979-8a7d-1db5b93c42f4\") " pod="openstack/openstack-cell1-galera-0" Dec 10 13:11:43 crc kubenswrapper[4921]: I1210 13:11:43.781079 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/18df86ca-4da7-4979-8a7d-1db5b93c42f4-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"18df86ca-4da7-4979-8a7d-1db5b93c42f4\") " pod="openstack/openstack-cell1-galera-0" Dec 10 13:11:43 crc kubenswrapper[4921]: I1210 13:11:43.874232 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Dec 10 13:11:43 crc kubenswrapper[4921]: I1210 13:11:43.875163 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Dec 10 13:11:43 crc kubenswrapper[4921]: I1210 13:11:43.881172 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-memcached-svc" Dec 10 13:11:43 crc kubenswrapper[4921]: I1210 13:11:43.881431 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-2dlm4" Dec 10 13:11:43 crc kubenswrapper[4921]: I1210 13:11:43.882953 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Dec 10 13:11:43 crc kubenswrapper[4921]: I1210 13:11:43.888446 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/18df86ca-4da7-4979-8a7d-1db5b93c42f4-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"18df86ca-4da7-4979-8a7d-1db5b93c42f4\") " pod="openstack/openstack-cell1-galera-0" Dec 10 13:11:43 crc kubenswrapper[4921]: I1210 13:11:43.888506 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/18df86ca-4da7-4979-8a7d-1db5b93c42f4-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"18df86ca-4da7-4979-8a7d-1db5b93c42f4\") " pod="openstack/openstack-cell1-galera-0" Dec 10 13:11:43 crc kubenswrapper[4921]: I1210 13:11:43.888525 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"openstack-cell1-galera-0\" (UID: \"18df86ca-4da7-4979-8a7d-1db5b93c42f4\") " pod="openstack/openstack-cell1-galera-0" Dec 10 13:11:43 crc kubenswrapper[4921]: I1210 13:11:43.888552 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/18df86ca-4da7-4979-8a7d-1db5b93c42f4-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"18df86ca-4da7-4979-8a7d-1db5b93c42f4\") " pod="openstack/openstack-cell1-galera-0" Dec 10 13:11:43 crc kubenswrapper[4921]: I1210 13:11:43.888570 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/18df86ca-4da7-4979-8a7d-1db5b93c42f4-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"18df86ca-4da7-4979-8a7d-1db5b93c42f4\") " pod="openstack/openstack-cell1-galera-0" Dec 10 13:11:43 crc kubenswrapper[4921]: I1210 13:11:43.888607 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-727hp\" (UniqueName: 
\"kubernetes.io/projected/18df86ca-4da7-4979-8a7d-1db5b93c42f4-kube-api-access-727hp\") pod \"openstack-cell1-galera-0\" (UID: \"18df86ca-4da7-4979-8a7d-1db5b93c42f4\") " pod="openstack/openstack-cell1-galera-0" Dec 10 13:11:43 crc kubenswrapper[4921]: I1210 13:11:43.888620 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/18df86ca-4da7-4979-8a7d-1db5b93c42f4-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"18df86ca-4da7-4979-8a7d-1db5b93c42f4\") " pod="openstack/openstack-cell1-galera-0" Dec 10 13:11:43 crc kubenswrapper[4921]: I1210 13:11:43.888647 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/18df86ca-4da7-4979-8a7d-1db5b93c42f4-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"18df86ca-4da7-4979-8a7d-1db5b93c42f4\") " pod="openstack/openstack-cell1-galera-0" Dec 10 13:11:43 crc kubenswrapper[4921]: I1210 13:11:43.889802 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/18df86ca-4da7-4979-8a7d-1db5b93c42f4-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"18df86ca-4da7-4979-8a7d-1db5b93c42f4\") " pod="openstack/openstack-cell1-galera-0" Dec 10 13:11:43 crc kubenswrapper[4921]: I1210 13:11:43.890113 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/18df86ca-4da7-4979-8a7d-1db5b93c42f4-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"18df86ca-4da7-4979-8a7d-1db5b93c42f4\") " pod="openstack/openstack-cell1-galera-0" Dec 10 13:11:43 crc kubenswrapper[4921]: I1210 13:11:43.891229 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/18df86ca-4da7-4979-8a7d-1db5b93c42f4-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"18df86ca-4da7-4979-8a7d-1db5b93c42f4\") " pod="openstack/openstack-cell1-galera-0" Dec 10 13:11:43 crc kubenswrapper[4921]: I1210 13:11:43.891458 4921 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"openstack-cell1-galera-0\" (UID: \"18df86ca-4da7-4979-8a7d-1db5b93c42f4\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/openstack-cell1-galera-0" Dec 10 13:11:43 crc kubenswrapper[4921]: I1210 13:11:43.894668 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/18df86ca-4da7-4979-8a7d-1db5b93c42f4-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"18df86ca-4da7-4979-8a7d-1db5b93c42f4\") " pod="openstack/openstack-cell1-galera-0" Dec 10 13:11:43 crc kubenswrapper[4921]: I1210 13:11:43.908204 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/18df86ca-4da7-4979-8a7d-1db5b93c42f4-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"18df86ca-4da7-4979-8a7d-1db5b93c42f4\") " pod="openstack/openstack-cell1-galera-0" Dec 10 13:11:43 crc kubenswrapper[4921]: I1210 13:11:43.939221 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/18df86ca-4da7-4979-8a7d-1db5b93c42f4-galera-tls-certs\") pod \"openstack-cell1-galera-0\" 
(UID: \"18df86ca-4da7-4979-8a7d-1db5b93c42f4\") " pod="openstack/openstack-cell1-galera-0" Dec 10 13:11:43 crc kubenswrapper[4921]: I1210 13:11:43.941300 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Dec 10 13:11:43 crc kubenswrapper[4921]: I1210 13:11:43.951955 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-727hp\" (UniqueName: \"kubernetes.io/projected/18df86ca-4da7-4979-8a7d-1db5b93c42f4-kube-api-access-727hp\") pod \"openstack-cell1-galera-0\" (UID: \"18df86ca-4da7-4979-8a7d-1db5b93c42f4\") " pod="openstack/openstack-cell1-galera-0" Dec 10 13:11:43 crc kubenswrapper[4921]: I1210 13:11:43.990292 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/c0fbcc23-a641-4787-9edc-7bf8e8e46a79-kolla-config\") pod \"memcached-0\" (UID: \"c0fbcc23-a641-4787-9edc-7bf8e8e46a79\") " pod="openstack/memcached-0" Dec 10 13:11:43 crc kubenswrapper[4921]: I1210 13:11:43.990328 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c0fbcc23-a641-4787-9edc-7bf8e8e46a79-config-data\") pod \"memcached-0\" (UID: \"c0fbcc23-a641-4787-9edc-7bf8e8e46a79\") " pod="openstack/memcached-0" Dec 10 13:11:43 crc kubenswrapper[4921]: I1210 13:11:43.990412 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0fbcc23-a641-4787-9edc-7bf8e8e46a79-combined-ca-bundle\") pod \"memcached-0\" (UID: \"c0fbcc23-a641-4787-9edc-7bf8e8e46a79\") " pod="openstack/memcached-0" Dec 10 13:11:43 crc kubenswrapper[4921]: I1210 13:11:43.990448 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lkpzq\" (UniqueName: \"kubernetes.io/projected/c0fbcc23-a641-4787-9edc-7bf8e8e46a79-kube-api-access-lkpzq\") pod \"memcached-0\" (UID: \"c0fbcc23-a641-4787-9edc-7bf8e8e46a79\") " pod="openstack/memcached-0" Dec 10 13:11:43 crc kubenswrapper[4921]: I1210 13:11:43.990504 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/c0fbcc23-a641-4787-9edc-7bf8e8e46a79-memcached-tls-certs\") pod \"memcached-0\" (UID: \"c0fbcc23-a641-4787-9edc-7bf8e8e46a79\") " pod="openstack/memcached-0" Dec 10 13:11:43 crc kubenswrapper[4921]: I1210 13:11:43.994456 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"openstack-cell1-galera-0\" (UID: \"18df86ca-4da7-4979-8a7d-1db5b93c42f4\") " pod="openstack/openstack-cell1-galera-0" Dec 10 13:11:44 crc kubenswrapper[4921]: I1210 13:11:44.027738 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Dec 10 13:11:44 crc kubenswrapper[4921]: I1210 13:11:44.066706 4921 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-9ww65" Dec 10 13:11:44 crc kubenswrapper[4921]: I1210 13:11:44.092069 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/c0fbcc23-a641-4787-9edc-7bf8e8e46a79-memcached-tls-certs\") pod \"memcached-0\" (UID: \"c0fbcc23-a641-4787-9edc-7bf8e8e46a79\") " pod="openstack/memcached-0" Dec 10 13:11:44 crc kubenswrapper[4921]: I1210 13:11:44.092144 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/c0fbcc23-a641-4787-9edc-7bf8e8e46a79-kolla-config\") pod \"memcached-0\" (UID: \"c0fbcc23-a641-4787-9edc-7bf8e8e46a79\") " pod="openstack/memcached-0" Dec 10 13:11:44 crc kubenswrapper[4921]: I1210 13:11:44.092163 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c0fbcc23-a641-4787-9edc-7bf8e8e46a79-config-data\") pod \"memcached-0\" (UID: \"c0fbcc23-a641-4787-9edc-7bf8e8e46a79\") " pod="openstack/memcached-0" Dec 10 13:11:44 crc kubenswrapper[4921]: I1210 13:11:44.092205 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0fbcc23-a641-4787-9edc-7bf8e8e46a79-combined-ca-bundle\") pod \"memcached-0\" (UID: \"c0fbcc23-a641-4787-9edc-7bf8e8e46a79\") " pod="openstack/memcached-0" Dec 10 13:11:44 crc kubenswrapper[4921]: I1210 13:11:44.092230 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lkpzq\" (UniqueName: \"kubernetes.io/projected/c0fbcc23-a641-4787-9edc-7bf8e8e46a79-kube-api-access-lkpzq\") pod \"memcached-0\" (UID: \"c0fbcc23-a641-4787-9edc-7bf8e8e46a79\") " pod="openstack/memcached-0" Dec 10 13:11:44 crc kubenswrapper[4921]: I1210 13:11:44.093716 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c0fbcc23-a641-4787-9edc-7bf8e8e46a79-config-data\") pod \"memcached-0\" (UID: \"c0fbcc23-a641-4787-9edc-7bf8e8e46a79\") " pod="openstack/memcached-0" Dec 10 13:11:44 crc kubenswrapper[4921]: I1210 13:11:44.095799 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/c0fbcc23-a641-4787-9edc-7bf8e8e46a79-kolla-config\") pod \"memcached-0\" (UID: \"c0fbcc23-a641-4787-9edc-7bf8e8e46a79\") " pod="openstack/memcached-0" Dec 10 13:11:44 crc kubenswrapper[4921]: I1210 13:11:44.104414 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/c0fbcc23-a641-4787-9edc-7bf8e8e46a79-memcached-tls-certs\") pod \"memcached-0\" (UID: \"c0fbcc23-a641-4787-9edc-7bf8e8e46a79\") " pod="openstack/memcached-0" Dec 10 13:11:44 crc kubenswrapper[4921]: I1210 13:11:44.120552 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lkpzq\" (UniqueName: \"kubernetes.io/projected/c0fbcc23-a641-4787-9edc-7bf8e8e46a79-kube-api-access-lkpzq\") pod \"memcached-0\" (UID: \"c0fbcc23-a641-4787-9edc-7bf8e8e46a79\") " pod="openstack/memcached-0" Dec 10 13:11:44 crc kubenswrapper[4921]: I1210 13:11:44.140287 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0fbcc23-a641-4787-9edc-7bf8e8e46a79-combined-ca-bundle\") pod \"memcached-0\" (UID: 
\"c0fbcc23-a641-4787-9edc-7bf8e8e46a79\") " pod="openstack/memcached-0" Dec 10 13:11:44 crc kubenswrapper[4921]: I1210 13:11:44.192679 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wtjhp\" (UniqueName: \"kubernetes.io/projected/8feb8732-25ce-4b70-ad69-bb98d4075b4b-kube-api-access-wtjhp\") pod \"8feb8732-25ce-4b70-ad69-bb98d4075b4b\" (UID: \"8feb8732-25ce-4b70-ad69-bb98d4075b4b\") " Dec 10 13:11:44 crc kubenswrapper[4921]: I1210 13:11:44.192732 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8feb8732-25ce-4b70-ad69-bb98d4075b4b-utilities\") pod \"8feb8732-25ce-4b70-ad69-bb98d4075b4b\" (UID: \"8feb8732-25ce-4b70-ad69-bb98d4075b4b\") " Dec 10 13:11:44 crc kubenswrapper[4921]: I1210 13:11:44.192804 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8feb8732-25ce-4b70-ad69-bb98d4075b4b-catalog-content\") pod \"8feb8732-25ce-4b70-ad69-bb98d4075b4b\" (UID: \"8feb8732-25ce-4b70-ad69-bb98d4075b4b\") " Dec 10 13:11:44 crc kubenswrapper[4921]: I1210 13:11:44.195035 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8feb8732-25ce-4b70-ad69-bb98d4075b4b-utilities" (OuterVolumeSpecName: "utilities") pod "8feb8732-25ce-4b70-ad69-bb98d4075b4b" (UID: "8feb8732-25ce-4b70-ad69-bb98d4075b4b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 13:11:44 crc kubenswrapper[4921]: I1210 13:11:44.197857 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8feb8732-25ce-4b70-ad69-bb98d4075b4b-kube-api-access-wtjhp" (OuterVolumeSpecName: "kube-api-access-wtjhp") pod "8feb8732-25ce-4b70-ad69-bb98d4075b4b" (UID: "8feb8732-25ce-4b70-ad69-bb98d4075b4b"). InnerVolumeSpecName "kube-api-access-wtjhp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 13:11:44 crc kubenswrapper[4921]: I1210 13:11:44.210327 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Dec 10 13:11:44 crc kubenswrapper[4921]: I1210 13:11:44.211023 4921 generic.go:334] "Generic (PLEG): container finished" podID="8feb8732-25ce-4b70-ad69-bb98d4075b4b" containerID="55af304d716429adcd32e6f0022d48100a66f29329bc5972b6d96ecb065d0eb9" exitCode=0 Dec 10 13:11:44 crc kubenswrapper[4921]: I1210 13:11:44.211110 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9ww65" event={"ID":"8feb8732-25ce-4b70-ad69-bb98d4075b4b","Type":"ContainerDied","Data":"55af304d716429adcd32e6f0022d48100a66f29329bc5972b6d96ecb065d0eb9"} Dec 10 13:11:44 crc kubenswrapper[4921]: I1210 13:11:44.211128 4921 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-9ww65" Dec 10 13:11:44 crc kubenswrapper[4921]: I1210 13:11:44.212683 4921 scope.go:117] "RemoveContainer" containerID="55af304d716429adcd32e6f0022d48100a66f29329bc5972b6d96ecb065d0eb9" Dec 10 13:11:44 crc kubenswrapper[4921]: I1210 13:11:44.233738 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9ww65" event={"ID":"8feb8732-25ce-4b70-ad69-bb98d4075b4b","Type":"ContainerDied","Data":"9be98dc5cd6a9c098c47f7b2ad5127a78b9c7607db1a500084d4e70f0e414e88"} Dec 10 13:11:44 crc kubenswrapper[4921]: I1210 13:11:44.244093 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"65febf2e-fbb3-42a6-96e0-b7933c0911dd","Type":"ContainerStarted","Data":"2dae56691fcfc89f90edc674e1632b6bbb645d7c380007c77cff0de416292ecc"} Dec 10 13:11:44 crc kubenswrapper[4921]: I1210 13:11:44.294521 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wtjhp\" (UniqueName: \"kubernetes.io/projected/8feb8732-25ce-4b70-ad69-bb98d4075b4b-kube-api-access-wtjhp\") on node \"crc\" DevicePath \"\"" Dec 10 13:11:44 crc kubenswrapper[4921]: I1210 13:11:44.294543 4921 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8feb8732-25ce-4b70-ad69-bb98d4075b4b-utilities\") on node \"crc\" DevicePath \"\"" Dec 10 13:11:44 crc kubenswrapper[4921]: I1210 13:11:44.325740 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8feb8732-25ce-4b70-ad69-bb98d4075b4b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8feb8732-25ce-4b70-ad69-bb98d4075b4b" (UID: "8feb8732-25ce-4b70-ad69-bb98d4075b4b"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 13:11:44 crc kubenswrapper[4921]: I1210 13:11:44.396169 4921 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8feb8732-25ce-4b70-ad69-bb98d4075b4b-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 10 13:11:44 crc kubenswrapper[4921]: I1210 13:11:44.574038 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-9ww65"] Dec 10 13:11:44 crc kubenswrapper[4921]: I1210 13:11:44.581772 4921 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-9ww65"] Dec 10 13:11:44 crc kubenswrapper[4921]: I1210 13:11:44.689609 4921 scope.go:117] "RemoveContainer" containerID="08158fe38656266a89145c894b740da2ed26f740448ec37a7d93e406a6844a80" Dec 10 13:11:44 crc kubenswrapper[4921]: I1210 13:11:44.872033 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 10 13:11:44 crc kubenswrapper[4921]: I1210 13:11:44.904247 4921 scope.go:117] "RemoveContainer" containerID="06e26de8240564675389d7b86d45407b3093ed643afc6e9877524031d9f7d699" Dec 10 13:11:44 crc kubenswrapper[4921]: I1210 13:11:44.965700 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Dec 10 13:11:45 crc kubenswrapper[4921]: I1210 13:11:45.109928 4921 scope.go:117] "RemoveContainer" containerID="55af304d716429adcd32e6f0022d48100a66f29329bc5972b6d96ecb065d0eb9" Dec 10 13:11:45 crc kubenswrapper[4921]: E1210 13:11:45.118850 4921 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"55af304d716429adcd32e6f0022d48100a66f29329bc5972b6d96ecb065d0eb9\": container with ID starting with 55af304d716429adcd32e6f0022d48100a66f29329bc5972b6d96ecb065d0eb9 not found: ID does not exist" containerID="55af304d716429adcd32e6f0022d48100a66f29329bc5972b6d96ecb065d0eb9" Dec 10 13:11:45 crc kubenswrapper[4921]: I1210 13:11:45.118894 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"55af304d716429adcd32e6f0022d48100a66f29329bc5972b6d96ecb065d0eb9"} err="failed to get container status \"55af304d716429adcd32e6f0022d48100a66f29329bc5972b6d96ecb065d0eb9\": rpc error: code = NotFound desc = could not find container \"55af304d716429adcd32e6f0022d48100a66f29329bc5972b6d96ecb065d0eb9\": container with ID starting with 55af304d716429adcd32e6f0022d48100a66f29329bc5972b6d96ecb065d0eb9 not found: ID does not exist" Dec 10 13:11:45 crc kubenswrapper[4921]: I1210 13:11:45.118926 4921 scope.go:117] "RemoveContainer" containerID="08158fe38656266a89145c894b740da2ed26f740448ec37a7d93e406a6844a80" Dec 10 13:11:45 crc kubenswrapper[4921]: E1210 13:11:45.119615 4921 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"08158fe38656266a89145c894b740da2ed26f740448ec37a7d93e406a6844a80\": container with ID starting with 08158fe38656266a89145c894b740da2ed26f740448ec37a7d93e406a6844a80 not found: ID does not exist" containerID="08158fe38656266a89145c894b740da2ed26f740448ec37a7d93e406a6844a80" Dec 10 13:11:45 crc kubenswrapper[4921]: I1210 13:11:45.119639 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"08158fe38656266a89145c894b740da2ed26f740448ec37a7d93e406a6844a80"} err="failed to get container status \"08158fe38656266a89145c894b740da2ed26f740448ec37a7d93e406a6844a80\": 
rpc error: code = NotFound desc = could not find container \"08158fe38656266a89145c894b740da2ed26f740448ec37a7d93e406a6844a80\": container with ID starting with 08158fe38656266a89145c894b740da2ed26f740448ec37a7d93e406a6844a80 not found: ID does not exist" Dec 10 13:11:45 crc kubenswrapper[4921]: I1210 13:11:45.119695 4921 scope.go:117] "RemoveContainer" containerID="06e26de8240564675389d7b86d45407b3093ed643afc6e9877524031d9f7d699" Dec 10 13:11:45 crc kubenswrapper[4921]: E1210 13:11:45.120640 4921 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"06e26de8240564675389d7b86d45407b3093ed643afc6e9877524031d9f7d699\": container with ID starting with 06e26de8240564675389d7b86d45407b3093ed643afc6e9877524031d9f7d699 not found: ID does not exist" containerID="06e26de8240564675389d7b86d45407b3093ed643afc6e9877524031d9f7d699" Dec 10 13:11:45 crc kubenswrapper[4921]: I1210 13:11:45.120658 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"06e26de8240564675389d7b86d45407b3093ed643afc6e9877524031d9f7d699"} err="failed to get container status \"06e26de8240564675389d7b86d45407b3093ed643afc6e9877524031d9f7d699\": rpc error: code = NotFound desc = could not find container \"06e26de8240564675389d7b86d45407b3093ed643afc6e9877524031d9f7d699\": container with ID starting with 06e26de8240564675389d7b86d45407b3093ed643afc6e9877524031d9f7d699 not found: ID does not exist" Dec 10 13:11:45 crc kubenswrapper[4921]: W1210 13:11:45.144983 4921 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc0fbcc23_a641_4787_9edc_7bf8e8e46a79.slice/crio-848f87e6c3b0b823acd8dcefdcba9c39007c14a80a737e0fea0ce87a92d3b8e0 WatchSource:0}: Error finding container 848f87e6c3b0b823acd8dcefdcba9c39007c14a80a737e0fea0ce87a92d3b8e0: Status 404 returned error can't find the container with id 848f87e6c3b0b823acd8dcefdcba9c39007c14a80a737e0fea0ce87a92d3b8e0 Dec 10 13:11:45 crc kubenswrapper[4921]: I1210 13:11:45.207939 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8feb8732-25ce-4b70-ad69-bb98d4075b4b" path="/var/lib/kubelet/pods/8feb8732-25ce-4b70-ad69-bb98d4075b4b/volumes" Dec 10 13:11:45 crc kubenswrapper[4921]: I1210 13:11:45.264265 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"c0fbcc23-a641-4787-9edc-7bf8e8e46a79","Type":"ContainerStarted","Data":"848f87e6c3b0b823acd8dcefdcba9c39007c14a80a737e0fea0ce87a92d3b8e0"} Dec 10 13:11:45 crc kubenswrapper[4921]: I1210 13:11:45.286142 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"18df86ca-4da7-4979-8a7d-1db5b93c42f4","Type":"ContainerStarted","Data":"5810424c396890b63fbf8e5c021bc47060dbe684db6d72ba5ff37b841ea642e1"} Dec 10 13:11:45 crc kubenswrapper[4921]: I1210 13:11:45.706528 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-82dmq" Dec 10 13:11:45 crc kubenswrapper[4921]: I1210 13:11:45.706566 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-82dmq" Dec 10 13:11:45 crc kubenswrapper[4921]: I1210 13:11:45.764767 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Dec 10 13:11:45 crc kubenswrapper[4921]: E1210 13:11:45.765192 4921 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="8feb8732-25ce-4b70-ad69-bb98d4075b4b" containerName="extract-utilities" Dec 10 13:11:45 crc kubenswrapper[4921]: I1210 13:11:45.765217 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="8feb8732-25ce-4b70-ad69-bb98d4075b4b" containerName="extract-utilities" Dec 10 13:11:45 crc kubenswrapper[4921]: E1210 13:11:45.765244 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8feb8732-25ce-4b70-ad69-bb98d4075b4b" containerName="extract-content" Dec 10 13:11:45 crc kubenswrapper[4921]: I1210 13:11:45.765252 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="8feb8732-25ce-4b70-ad69-bb98d4075b4b" containerName="extract-content" Dec 10 13:11:45 crc kubenswrapper[4921]: E1210 13:11:45.765269 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8feb8732-25ce-4b70-ad69-bb98d4075b4b" containerName="registry-server" Dec 10 13:11:45 crc kubenswrapper[4921]: I1210 13:11:45.765277 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="8feb8732-25ce-4b70-ad69-bb98d4075b4b" containerName="registry-server" Dec 10 13:11:45 crc kubenswrapper[4921]: I1210 13:11:45.765468 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="8feb8732-25ce-4b70-ad69-bb98d4075b4b" containerName="registry-server" Dec 10 13:11:45 crc kubenswrapper[4921]: I1210 13:11:45.766123 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 10 13:11:45 crc kubenswrapper[4921]: I1210 13:11:45.768097 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-5sdlr" Dec 10 13:11:45 crc kubenswrapper[4921]: I1210 13:11:45.772983 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 10 13:11:45 crc kubenswrapper[4921]: I1210 13:11:45.825968 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wxmg9\" (UniqueName: \"kubernetes.io/projected/4f843ca5-c8e0-4c44-a626-3cb41c83bab3-kube-api-access-wxmg9\") pod \"kube-state-metrics-0\" (UID: \"4f843ca5-c8e0-4c44-a626-3cb41c83bab3\") " pod="openstack/kube-state-metrics-0" Dec 10 13:11:45 crc kubenswrapper[4921]: I1210 13:11:45.886282 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-82dmq" Dec 10 13:11:45 crc kubenswrapper[4921]: I1210 13:11:45.928897 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wxmg9\" (UniqueName: \"kubernetes.io/projected/4f843ca5-c8e0-4c44-a626-3cb41c83bab3-kube-api-access-wxmg9\") pod \"kube-state-metrics-0\" (UID: \"4f843ca5-c8e0-4c44-a626-3cb41c83bab3\") " pod="openstack/kube-state-metrics-0" Dec 10 13:11:45 crc kubenswrapper[4921]: I1210 13:11:45.965523 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wxmg9\" (UniqueName: \"kubernetes.io/projected/4f843ca5-c8e0-4c44-a626-3cb41c83bab3-kube-api-access-wxmg9\") pod \"kube-state-metrics-0\" (UID: \"4f843ca5-c8e0-4c44-a626-3cb41c83bab3\") " pod="openstack/kube-state-metrics-0" Dec 10 13:11:46 crc kubenswrapper[4921]: I1210 13:11:46.108084 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 10 13:11:46 crc kubenswrapper[4921]: I1210 13:11:46.438614 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-82dmq" Dec 10 13:11:46 crc kubenswrapper[4921]: I1210 13:11:46.712793 4921 patch_prober.go:28] interesting pod/machine-config-daemon-vn2n6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 10 13:11:46 crc kubenswrapper[4921]: I1210 13:11:46.713144 4921 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" podUID="354355f7-6630-49a8-bdc5-5e875feecb7f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 10 13:11:46 crc kubenswrapper[4921]: I1210 13:11:46.840289 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 10 13:11:47 crc kubenswrapper[4921]: I1210 13:11:47.398898 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"4f843ca5-c8e0-4c44-a626-3cb41c83bab3","Type":"ContainerStarted","Data":"36cdb02f3799e4df826fd1642d79c27d8c34e649a3eb10fdf2f68a57b400df50"} Dec 10 13:11:47 crc kubenswrapper[4921]: I1210 13:11:47.766166 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-82dmq"] Dec 10 13:11:48 crc kubenswrapper[4921]: I1210 13:11:48.410812 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-82dmq" podUID="3038fce5-a66d-441a-b4af-00e5afb26384" containerName="registry-server" containerID="cri-o://ae9f3da94b22f54f831da4db16a59c2bdcd8e97421f73e78041cfb48aef3afe7" gracePeriod=2 Dec 10 13:11:48 crc kubenswrapper[4921]: I1210 13:11:48.697899 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-75zw8"] Dec 10 13:11:48 crc kubenswrapper[4921]: I1210 13:11:48.698841 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-75zw8" Dec 10 13:11:48 crc kubenswrapper[4921]: I1210 13:11:48.714841 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovncontroller-ovndbs" Dec 10 13:11:48 crc kubenswrapper[4921]: I1210 13:11:48.715160 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts" Dec 10 13:11:48 crc kubenswrapper[4921]: I1210 13:11:48.716495 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-vxvxt" Dec 10 13:11:48 crc kubenswrapper[4921]: I1210 13:11:48.753715 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-zv79d"] Dec 10 13:11:48 crc kubenswrapper[4921]: I1210 13:11:48.764012 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-ovs-zv79d" Dec 10 13:11:48 crc kubenswrapper[4921]: I1210 13:11:48.774224 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-75zw8"] Dec 10 13:11:48 crc kubenswrapper[4921]: I1210 13:11:48.799696 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-zv79d"] Dec 10 13:11:48 crc kubenswrapper[4921]: I1210 13:11:48.801573 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-llmgn\" (UniqueName: \"kubernetes.io/projected/be165009-1ecf-4849-8cff-e83071094e81-kube-api-access-llmgn\") pod \"ovn-controller-75zw8\" (UID: \"be165009-1ecf-4849-8cff-e83071094e81\") " pod="openstack/ovn-controller-75zw8" Dec 10 13:11:48 crc kubenswrapper[4921]: I1210 13:11:48.801656 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/be165009-1ecf-4849-8cff-e83071094e81-scripts\") pod \"ovn-controller-75zw8\" (UID: \"be165009-1ecf-4849-8cff-e83071094e81\") " pod="openstack/ovn-controller-75zw8" Dec 10 13:11:48 crc kubenswrapper[4921]: I1210 13:11:48.801694 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/be165009-1ecf-4849-8cff-e83071094e81-var-run\") pod \"ovn-controller-75zw8\" (UID: \"be165009-1ecf-4849-8cff-e83071094e81\") " pod="openstack/ovn-controller-75zw8" Dec 10 13:11:48 crc kubenswrapper[4921]: I1210 13:11:48.801742 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/8444cda9-1de8-49a1-81b3-ebf79bae9995-var-lib\") pod \"ovn-controller-ovs-zv79d\" (UID: \"8444cda9-1de8-49a1-81b3-ebf79bae9995\") " pod="openstack/ovn-controller-ovs-zv79d" Dec 10 13:11:48 crc kubenswrapper[4921]: I1210 13:11:48.801759 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8444cda9-1de8-49a1-81b3-ebf79bae9995-scripts\") pod \"ovn-controller-ovs-zv79d\" (UID: \"8444cda9-1de8-49a1-81b3-ebf79bae9995\") " pod="openstack/ovn-controller-ovs-zv79d" Dec 10 13:11:48 crc kubenswrapper[4921]: I1210 13:11:48.801780 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/8444cda9-1de8-49a1-81b3-ebf79bae9995-var-run\") pod \"ovn-controller-ovs-zv79d\" (UID: \"8444cda9-1de8-49a1-81b3-ebf79bae9995\") " pod="openstack/ovn-controller-ovs-zv79d" Dec 10 13:11:48 crc kubenswrapper[4921]: I1210 13:11:48.801834 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/be165009-1ecf-4849-8cff-e83071094e81-ovn-controller-tls-certs\") pod \"ovn-controller-75zw8\" (UID: \"be165009-1ecf-4849-8cff-e83071094e81\") " pod="openstack/ovn-controller-75zw8" Dec 10 13:11:48 crc kubenswrapper[4921]: I1210 13:11:48.801861 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be165009-1ecf-4849-8cff-e83071094e81-combined-ca-bundle\") pod \"ovn-controller-75zw8\" (UID: \"be165009-1ecf-4849-8cff-e83071094e81\") " pod="openstack/ovn-controller-75zw8" Dec 10 13:11:48 crc kubenswrapper[4921]: I1210 
13:11:48.801899 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/8444cda9-1de8-49a1-81b3-ebf79bae9995-var-log\") pod \"ovn-controller-ovs-zv79d\" (UID: \"8444cda9-1de8-49a1-81b3-ebf79bae9995\") " pod="openstack/ovn-controller-ovs-zv79d" Dec 10 13:11:48 crc kubenswrapper[4921]: I1210 13:11:48.801915 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/be165009-1ecf-4849-8cff-e83071094e81-var-log-ovn\") pod \"ovn-controller-75zw8\" (UID: \"be165009-1ecf-4849-8cff-e83071094e81\") " pod="openstack/ovn-controller-75zw8" Dec 10 13:11:48 crc kubenswrapper[4921]: I1210 13:11:48.801930 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/8444cda9-1de8-49a1-81b3-ebf79bae9995-etc-ovs\") pod \"ovn-controller-ovs-zv79d\" (UID: \"8444cda9-1de8-49a1-81b3-ebf79bae9995\") " pod="openstack/ovn-controller-ovs-zv79d" Dec 10 13:11:48 crc kubenswrapper[4921]: I1210 13:11:48.801969 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/be165009-1ecf-4849-8cff-e83071094e81-var-run-ovn\") pod \"ovn-controller-75zw8\" (UID: \"be165009-1ecf-4849-8cff-e83071094e81\") " pod="openstack/ovn-controller-75zw8" Dec 10 13:11:48 crc kubenswrapper[4921]: I1210 13:11:48.801992 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vsfc6\" (UniqueName: \"kubernetes.io/projected/8444cda9-1de8-49a1-81b3-ebf79bae9995-kube-api-access-vsfc6\") pod \"ovn-controller-ovs-zv79d\" (UID: \"8444cda9-1de8-49a1-81b3-ebf79bae9995\") " pod="openstack/ovn-controller-ovs-zv79d" Dec 10 13:11:48 crc kubenswrapper[4921]: I1210 13:11:48.903502 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/8444cda9-1de8-49a1-81b3-ebf79bae9995-var-lib\") pod \"ovn-controller-ovs-zv79d\" (UID: \"8444cda9-1de8-49a1-81b3-ebf79bae9995\") " pod="openstack/ovn-controller-ovs-zv79d" Dec 10 13:11:48 crc kubenswrapper[4921]: I1210 13:11:48.903553 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8444cda9-1de8-49a1-81b3-ebf79bae9995-scripts\") pod \"ovn-controller-ovs-zv79d\" (UID: \"8444cda9-1de8-49a1-81b3-ebf79bae9995\") " pod="openstack/ovn-controller-ovs-zv79d" Dec 10 13:11:48 crc kubenswrapper[4921]: I1210 13:11:48.903578 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/8444cda9-1de8-49a1-81b3-ebf79bae9995-var-run\") pod \"ovn-controller-ovs-zv79d\" (UID: \"8444cda9-1de8-49a1-81b3-ebf79bae9995\") " pod="openstack/ovn-controller-ovs-zv79d" Dec 10 13:11:48 crc kubenswrapper[4921]: I1210 13:11:48.903618 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/be165009-1ecf-4849-8cff-e83071094e81-ovn-controller-tls-certs\") pod \"ovn-controller-75zw8\" (UID: \"be165009-1ecf-4849-8cff-e83071094e81\") " pod="openstack/ovn-controller-75zw8" Dec 10 13:11:48 crc kubenswrapper[4921]: I1210 13:11:48.903645 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be165009-1ecf-4849-8cff-e83071094e81-combined-ca-bundle\") pod \"ovn-controller-75zw8\" (UID: \"be165009-1ecf-4849-8cff-e83071094e81\") " pod="openstack/ovn-controller-75zw8" Dec 10 13:11:48 crc kubenswrapper[4921]: I1210 13:11:48.903663 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/8444cda9-1de8-49a1-81b3-ebf79bae9995-var-log\") pod \"ovn-controller-ovs-zv79d\" (UID: \"8444cda9-1de8-49a1-81b3-ebf79bae9995\") " pod="openstack/ovn-controller-ovs-zv79d" Dec 10 13:11:48 crc kubenswrapper[4921]: I1210 13:11:48.903682 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/be165009-1ecf-4849-8cff-e83071094e81-var-log-ovn\") pod \"ovn-controller-75zw8\" (UID: \"be165009-1ecf-4849-8cff-e83071094e81\") " pod="openstack/ovn-controller-75zw8" Dec 10 13:11:48 crc kubenswrapper[4921]: I1210 13:11:48.903700 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/8444cda9-1de8-49a1-81b3-ebf79bae9995-etc-ovs\") pod \"ovn-controller-ovs-zv79d\" (UID: \"8444cda9-1de8-49a1-81b3-ebf79bae9995\") " pod="openstack/ovn-controller-ovs-zv79d" Dec 10 13:11:48 crc kubenswrapper[4921]: I1210 13:11:48.903753 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/be165009-1ecf-4849-8cff-e83071094e81-var-run-ovn\") pod \"ovn-controller-75zw8\" (UID: \"be165009-1ecf-4849-8cff-e83071094e81\") " pod="openstack/ovn-controller-75zw8" Dec 10 13:11:48 crc kubenswrapper[4921]: I1210 13:11:48.903783 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vsfc6\" (UniqueName: \"kubernetes.io/projected/8444cda9-1de8-49a1-81b3-ebf79bae9995-kube-api-access-vsfc6\") pod \"ovn-controller-ovs-zv79d\" (UID: \"8444cda9-1de8-49a1-81b3-ebf79bae9995\") " pod="openstack/ovn-controller-ovs-zv79d" Dec 10 13:11:48 crc kubenswrapper[4921]: I1210 13:11:48.903808 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-llmgn\" (UniqueName: \"kubernetes.io/projected/be165009-1ecf-4849-8cff-e83071094e81-kube-api-access-llmgn\") pod \"ovn-controller-75zw8\" (UID: \"be165009-1ecf-4849-8cff-e83071094e81\") " pod="openstack/ovn-controller-75zw8" Dec 10 13:11:48 crc kubenswrapper[4921]: I1210 13:11:48.903852 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/be165009-1ecf-4849-8cff-e83071094e81-scripts\") pod \"ovn-controller-75zw8\" (UID: \"be165009-1ecf-4849-8cff-e83071094e81\") " pod="openstack/ovn-controller-75zw8" Dec 10 13:11:48 crc kubenswrapper[4921]: I1210 13:11:48.903890 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/be165009-1ecf-4849-8cff-e83071094e81-var-run\") pod \"ovn-controller-75zw8\" (UID: \"be165009-1ecf-4849-8cff-e83071094e81\") " pod="openstack/ovn-controller-75zw8" Dec 10 13:11:48 crc kubenswrapper[4921]: I1210 13:11:48.906239 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/be165009-1ecf-4849-8cff-e83071094e81-var-run\") pod \"ovn-controller-75zw8\" (UID: \"be165009-1ecf-4849-8cff-e83071094e81\") " pod="openstack/ovn-controller-75zw8" 
Dec 10 13:11:48 crc kubenswrapper[4921]: I1210 13:11:48.906311 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/8444cda9-1de8-49a1-81b3-ebf79bae9995-var-run\") pod \"ovn-controller-ovs-zv79d\" (UID: \"8444cda9-1de8-49a1-81b3-ebf79bae9995\") " pod="openstack/ovn-controller-ovs-zv79d"
Dec 10 13:11:48 crc kubenswrapper[4921]: I1210 13:11:48.906422 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/8444cda9-1de8-49a1-81b3-ebf79bae9995-var-lib\") pod \"ovn-controller-ovs-zv79d\" (UID: \"8444cda9-1de8-49a1-81b3-ebf79bae9995\") " pod="openstack/ovn-controller-ovs-zv79d"
Dec 10 13:11:48 crc kubenswrapper[4921]: I1210 13:11:48.908043 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/be165009-1ecf-4849-8cff-e83071094e81-var-run-ovn\") pod \"ovn-controller-75zw8\" (UID: \"be165009-1ecf-4849-8cff-e83071094e81\") " pod="openstack/ovn-controller-75zw8"
Dec 10 13:11:48 crc kubenswrapper[4921]: I1210 13:11:48.908217 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/be165009-1ecf-4849-8cff-e83071094e81-var-log-ovn\") pod \"ovn-controller-75zw8\" (UID: \"be165009-1ecf-4849-8cff-e83071094e81\") " pod="openstack/ovn-controller-75zw8"
Dec 10 13:11:48 crc kubenswrapper[4921]: I1210 13:11:48.908256 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/8444cda9-1de8-49a1-81b3-ebf79bae9995-var-log\") pod \"ovn-controller-ovs-zv79d\" (UID: \"8444cda9-1de8-49a1-81b3-ebf79bae9995\") " pod="openstack/ovn-controller-ovs-zv79d"
Dec 10 13:11:48 crc kubenswrapper[4921]: I1210 13:11:48.908360 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/8444cda9-1de8-49a1-81b3-ebf79bae9995-etc-ovs\") pod \"ovn-controller-ovs-zv79d\" (UID: \"8444cda9-1de8-49a1-81b3-ebf79bae9995\") " pod="openstack/ovn-controller-ovs-zv79d"
Dec 10 13:11:48 crc kubenswrapper[4921]: I1210 13:11:48.911938 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/be165009-1ecf-4849-8cff-e83071094e81-scripts\") pod \"ovn-controller-75zw8\" (UID: \"be165009-1ecf-4849-8cff-e83071094e81\") " pod="openstack/ovn-controller-75zw8"
Dec 10 13:11:48 crc kubenswrapper[4921]: I1210 13:11:48.912659 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8444cda9-1de8-49a1-81b3-ebf79bae9995-scripts\") pod \"ovn-controller-ovs-zv79d\" (UID: \"8444cda9-1de8-49a1-81b3-ebf79bae9995\") " pod="openstack/ovn-controller-ovs-zv79d"
Dec 10 13:11:48 crc kubenswrapper[4921]: I1210 13:11:48.934204 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be165009-1ecf-4849-8cff-e83071094e81-combined-ca-bundle\") pod \"ovn-controller-75zw8\" (UID: \"be165009-1ecf-4849-8cff-e83071094e81\") " pod="openstack/ovn-controller-75zw8"
Dec 10 13:11:48 crc kubenswrapper[4921]: I1210 13:11:48.943367 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/be165009-1ecf-4849-8cff-e83071094e81-ovn-controller-tls-certs\") pod \"ovn-controller-75zw8\" (UID: \"be165009-1ecf-4849-8cff-e83071094e81\") " pod="openstack/ovn-controller-75zw8"
Dec 10 13:11:48 crc kubenswrapper[4921]: I1210 13:11:48.944813 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vsfc6\" (UniqueName: \"kubernetes.io/projected/8444cda9-1de8-49a1-81b3-ebf79bae9995-kube-api-access-vsfc6\") pod \"ovn-controller-ovs-zv79d\" (UID: \"8444cda9-1de8-49a1-81b3-ebf79bae9995\") " pod="openstack/ovn-controller-ovs-zv79d"
Dec 10 13:11:48 crc kubenswrapper[4921]: I1210 13:11:48.951487 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-llmgn\" (UniqueName: \"kubernetes.io/projected/be165009-1ecf-4849-8cff-e83071094e81-kube-api-access-llmgn\") pod \"ovn-controller-75zw8\" (UID: \"be165009-1ecf-4849-8cff-e83071094e81\") " pod="openstack/ovn-controller-75zw8"
Dec 10 13:11:49 crc kubenswrapper[4921]: I1210 13:11:49.036494 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-75zw8"
Dec 10 13:11:49 crc kubenswrapper[4921]: I1210 13:11:49.103100 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-zv79d"
Dec 10 13:11:49 crc kubenswrapper[4921]: I1210 13:11:49.429595 4921 generic.go:334] "Generic (PLEG): container finished" podID="3038fce5-a66d-441a-b4af-00e5afb26384" containerID="ae9f3da94b22f54f831da4db16a59c2bdcd8e97421f73e78041cfb48aef3afe7" exitCode=0
Dec 10 13:11:49 crc kubenswrapper[4921]: I1210 13:11:49.429933 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-82dmq" event={"ID":"3038fce5-a66d-441a-b4af-00e5afb26384","Type":"ContainerDied","Data":"ae9f3da94b22f54f831da4db16a59c2bdcd8e97421f73e78041cfb48aef3afe7"}
Dec 10 13:11:50 crc kubenswrapper[4921]: I1210 13:11:50.291575 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-zv79d"]
Dec 10 13:11:50 crc kubenswrapper[4921]: I1210 13:11:50.523290 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-f9dft"]
Dec 10 13:11:50 crc kubenswrapper[4921]: I1210 13:11:50.524645 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-f9dft"
Dec 10 13:11:50 crc kubenswrapper[4921]: I1210 13:11:50.535008 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config"
Dec 10 13:11:50 crc kubenswrapper[4921]: I1210 13:11:50.535215 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovn-metrics"
Dec 10 13:11:50 crc kubenswrapper[4921]: I1210 13:11:50.568016 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-f9dft"]
Dec 10 13:11:50 crc kubenswrapper[4921]: I1210 13:11:50.645411 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v6d4x\" (UniqueName: \"kubernetes.io/projected/039043c2-b38e-4a56-a3ce-45c9462ed0f1-kube-api-access-v6d4x\") pod \"ovn-controller-metrics-f9dft\" (UID: \"039043c2-b38e-4a56-a3ce-45c9462ed0f1\") " pod="openstack/ovn-controller-metrics-f9dft"
Dec 10 13:11:50 crc kubenswrapper[4921]: I1210 13:11:50.645460 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/039043c2-b38e-4a56-a3ce-45c9462ed0f1-ovn-rundir\") pod \"ovn-controller-metrics-f9dft\" (UID: \"039043c2-b38e-4a56-a3ce-45c9462ed0f1\") " pod="openstack/ovn-controller-metrics-f9dft"
Dec 10 13:11:50 crc kubenswrapper[4921]: I1210 13:11:50.645480 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/039043c2-b38e-4a56-a3ce-45c9462ed0f1-ovs-rundir\") pod \"ovn-controller-metrics-f9dft\" (UID: \"039043c2-b38e-4a56-a3ce-45c9462ed0f1\") " pod="openstack/ovn-controller-metrics-f9dft"
Dec 10 13:11:50 crc kubenswrapper[4921]: I1210 13:11:50.645525 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/039043c2-b38e-4a56-a3ce-45c9462ed0f1-config\") pod \"ovn-controller-metrics-f9dft\" (UID: \"039043c2-b38e-4a56-a3ce-45c9462ed0f1\") " pod="openstack/ovn-controller-metrics-f9dft"
Dec 10 13:11:50 crc kubenswrapper[4921]: I1210 13:11:50.645546 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/039043c2-b38e-4a56-a3ce-45c9462ed0f1-combined-ca-bundle\") pod \"ovn-controller-metrics-f9dft\" (UID: \"039043c2-b38e-4a56-a3ce-45c9462ed0f1\") " pod="openstack/ovn-controller-metrics-f9dft"
Dec 10 13:11:50 crc kubenswrapper[4921]: I1210 13:11:50.645646 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/039043c2-b38e-4a56-a3ce-45c9462ed0f1-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-f9dft\" (UID: \"039043c2-b38e-4a56-a3ce-45c9462ed0f1\") " pod="openstack/ovn-controller-metrics-f9dft"
Dec 10 13:11:50 crc kubenswrapper[4921]: I1210 13:11:50.752838 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/039043c2-b38e-4a56-a3ce-45c9462ed0f1-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-f9dft\" (UID: \"039043c2-b38e-4a56-a3ce-45c9462ed0f1\") " pod="openstack/ovn-controller-metrics-f9dft"
Dec 10 13:11:50 crc kubenswrapper[4921]: I1210 13:11:50.752905 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v6d4x\" (UniqueName: \"kubernetes.io/projected/039043c2-b38e-4a56-a3ce-45c9462ed0f1-kube-api-access-v6d4x\") pod \"ovn-controller-metrics-f9dft\" (UID: \"039043c2-b38e-4a56-a3ce-45c9462ed0f1\") " pod="openstack/ovn-controller-metrics-f9dft"
Dec 10 13:11:50 crc kubenswrapper[4921]: I1210 13:11:50.752929 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/039043c2-b38e-4a56-a3ce-45c9462ed0f1-ovn-rundir\") pod \"ovn-controller-metrics-f9dft\" (UID: \"039043c2-b38e-4a56-a3ce-45c9462ed0f1\") " pod="openstack/ovn-controller-metrics-f9dft"
Dec 10 13:11:50 crc kubenswrapper[4921]: I1210 13:11:50.752948 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/039043c2-b38e-4a56-a3ce-45c9462ed0f1-ovs-rundir\") pod \"ovn-controller-metrics-f9dft\" (UID: \"039043c2-b38e-4a56-a3ce-45c9462ed0f1\") " pod="openstack/ovn-controller-metrics-f9dft"
Dec 10 13:11:50 crc kubenswrapper[4921]: I1210 13:11:50.752985 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/039043c2-b38e-4a56-a3ce-45c9462ed0f1-config\") pod \"ovn-controller-metrics-f9dft\" (UID: \"039043c2-b38e-4a56-a3ce-45c9462ed0f1\") " pod="openstack/ovn-controller-metrics-f9dft"
Dec 10 13:11:50 crc kubenswrapper[4921]: I1210 13:11:50.753005 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/039043c2-b38e-4a56-a3ce-45c9462ed0f1-combined-ca-bundle\") pod \"ovn-controller-metrics-f9dft\" (UID: \"039043c2-b38e-4a56-a3ce-45c9462ed0f1\") " pod="openstack/ovn-controller-metrics-f9dft"
Dec 10 13:11:50 crc kubenswrapper[4921]: I1210 13:11:50.753720 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/039043c2-b38e-4a56-a3ce-45c9462ed0f1-ovn-rundir\") pod \"ovn-controller-metrics-f9dft\" (UID: \"039043c2-b38e-4a56-a3ce-45c9462ed0f1\") " pod="openstack/ovn-controller-metrics-f9dft"
Dec 10 13:11:50 crc kubenswrapper[4921]: I1210 13:11:50.754224 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/039043c2-b38e-4a56-a3ce-45c9462ed0f1-ovs-rundir\") pod \"ovn-controller-metrics-f9dft\" (UID: \"039043c2-b38e-4a56-a3ce-45c9462ed0f1\") " pod="openstack/ovn-controller-metrics-f9dft"
Dec 10 13:11:50 crc kubenswrapper[4921]: I1210 13:11:50.755019 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/039043c2-b38e-4a56-a3ce-45c9462ed0f1-config\") pod \"ovn-controller-metrics-f9dft\" (UID: \"039043c2-b38e-4a56-a3ce-45c9462ed0f1\") " pod="openstack/ovn-controller-metrics-f9dft"
Dec 10 13:11:50 crc kubenswrapper[4921]: I1210 13:11:50.778998 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/039043c2-b38e-4a56-a3ce-45c9462ed0f1-combined-ca-bundle\") pod \"ovn-controller-metrics-f9dft\" (UID: \"039043c2-b38e-4a56-a3ce-45c9462ed0f1\") " pod="openstack/ovn-controller-metrics-f9dft"
Dec 10 13:11:50 crc kubenswrapper[4921]: I1210 13:11:50.780066 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/039043c2-b38e-4a56-a3ce-45c9462ed0f1-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-f9dft\" (UID: \"039043c2-b38e-4a56-a3ce-45c9462ed0f1\") " pod="openstack/ovn-controller-metrics-f9dft"
Dec 10 13:11:50 crc kubenswrapper[4921]: I1210 13:11:50.780221 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v6d4x\" (UniqueName: \"kubernetes.io/projected/039043c2-b38e-4a56-a3ce-45c9462ed0f1-kube-api-access-v6d4x\") pod \"ovn-controller-metrics-f9dft\" (UID: \"039043c2-b38e-4a56-a3ce-45c9462ed0f1\") " pod="openstack/ovn-controller-metrics-f9dft"
Dec 10 13:11:50 crc kubenswrapper[4921]: I1210 13:11:50.857088 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-f9dft"
Dec 10 13:11:51 crc kubenswrapper[4921]: I1210 13:11:51.811941 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"]
Dec 10 13:11:51 crc kubenswrapper[4921]: I1210 13:11:51.819510 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0"
Dec 10 13:11:51 crc kubenswrapper[4921]: I1210 13:11:51.822304 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"]
Dec 10 13:11:51 crc kubenswrapper[4921]: I1210 13:11:51.823870 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config"
Dec 10 13:11:51 crc kubenswrapper[4921]: I1210 13:11:51.824078 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-42ngz"
Dec 10 13:11:51 crc kubenswrapper[4921]: I1210 13:11:51.824264 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs"
Dec 10 13:11:51 crc kubenswrapper[4921]: I1210 13:11:51.835331 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts"
Dec 10 13:11:51 crc kubenswrapper[4921]: I1210 13:11:51.875701 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f5f51319-9413-4553-b21a-6ac18a452855-config\") pod \"ovsdbserver-nb-0\" (UID: \"f5f51319-9413-4553-b21a-6ac18a452855\") " pod="openstack/ovsdbserver-nb-0"
Dec 10 13:11:51 crc kubenswrapper[4921]: I1210 13:11:51.876115 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5f51319-9413-4553-b21a-6ac18a452855-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"f5f51319-9413-4553-b21a-6ac18a452855\") " pod="openstack/ovsdbserver-nb-0"
Dec 10 13:11:51 crc kubenswrapper[4921]: I1210 13:11:51.876142 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/f5f51319-9413-4553-b21a-6ac18a452855-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"f5f51319-9413-4553-b21a-6ac18a452855\") " pod="openstack/ovsdbserver-nb-0"
Dec 10 13:11:51 crc kubenswrapper[4921]: I1210 13:11:51.876174 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/f5f51319-9413-4553-b21a-6ac18a452855-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"f5f51319-9413-4553-b21a-6ac18a452855\") " pod="openstack/ovsdbserver-nb-0"
Dec 10 13:11:51 crc kubenswrapper[4921]: I1210 13:11:51.876192 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/f5f51319-9413-4553-b21a-6ac18a452855-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"f5f51319-9413-4553-b21a-6ac18a452855\") " pod="openstack/ovsdbserver-nb-0"
Dec 10 13:11:51 crc kubenswrapper[4921]: I1210 13:11:51.876215 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"ovsdbserver-nb-0\" (UID: \"f5f51319-9413-4553-b21a-6ac18a452855\") " pod="openstack/ovsdbserver-nb-0"
Dec 10 13:11:51 crc kubenswrapper[4921]: I1210 13:11:51.876245 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bnsnk\" (UniqueName: \"kubernetes.io/projected/f5f51319-9413-4553-b21a-6ac18a452855-kube-api-access-bnsnk\") pod \"ovsdbserver-nb-0\" (UID: \"f5f51319-9413-4553-b21a-6ac18a452855\") " pod="openstack/ovsdbserver-nb-0"
Dec 10 13:11:51 crc kubenswrapper[4921]: I1210 13:11:51.876270 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f5f51319-9413-4553-b21a-6ac18a452855-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"f5f51319-9413-4553-b21a-6ac18a452855\") " pod="openstack/ovsdbserver-nb-0"
Dec 10 13:11:51 crc kubenswrapper[4921]: I1210 13:11:51.977757 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f5f51319-9413-4553-b21a-6ac18a452855-config\") pod \"ovsdbserver-nb-0\" (UID: \"f5f51319-9413-4553-b21a-6ac18a452855\") " pod="openstack/ovsdbserver-nb-0"
Dec 10 13:11:51 crc kubenswrapper[4921]: I1210 13:11:51.977819 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5f51319-9413-4553-b21a-6ac18a452855-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"f5f51319-9413-4553-b21a-6ac18a452855\") " pod="openstack/ovsdbserver-nb-0"
Dec 10 13:11:51 crc kubenswrapper[4921]: I1210 13:11:51.977853 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/f5f51319-9413-4553-b21a-6ac18a452855-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"f5f51319-9413-4553-b21a-6ac18a452855\") " pod="openstack/ovsdbserver-nb-0"
Dec 10 13:11:51 crc kubenswrapper[4921]: I1210 13:11:51.977889 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/f5f51319-9413-4553-b21a-6ac18a452855-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"f5f51319-9413-4553-b21a-6ac18a452855\") " pod="openstack/ovsdbserver-nb-0"
Dec 10 13:11:51 crc kubenswrapper[4921]: I1210 13:11:51.977914 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/f5f51319-9413-4553-b21a-6ac18a452855-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"f5f51319-9413-4553-b21a-6ac18a452855\") " pod="openstack/ovsdbserver-nb-0"
Dec 10 13:11:51 crc kubenswrapper[4921]: I1210 13:11:51.977946 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"ovsdbserver-nb-0\" (UID: \"f5f51319-9413-4553-b21a-6ac18a452855\") " pod="openstack/ovsdbserver-nb-0"
Dec 10 13:11:51 crc kubenswrapper[4921]: I1210 13:11:51.977983 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bnsnk\" (UniqueName: \"kubernetes.io/projected/f5f51319-9413-4553-b21a-6ac18a452855-kube-api-access-bnsnk\") pod \"ovsdbserver-nb-0\" (UID: \"f5f51319-9413-4553-b21a-6ac18a452855\") " pod="openstack/ovsdbserver-nb-0"
Dec 10 13:11:51 crc kubenswrapper[4921]: I1210 13:11:51.978009 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f5f51319-9413-4553-b21a-6ac18a452855-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"f5f51319-9413-4553-b21a-6ac18a452855\") " pod="openstack/ovsdbserver-nb-0"
Dec 10 13:11:51 crc kubenswrapper[4921]: I1210 13:11:51.978518 4921 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"ovsdbserver-nb-0\" (UID: \"f5f51319-9413-4553-b21a-6ac18a452855\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/ovsdbserver-nb-0"
Dec 10 13:11:51 crc kubenswrapper[4921]: I1210 13:11:51.978602 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f5f51319-9413-4553-b21a-6ac18a452855-config\") pod \"ovsdbserver-nb-0\" (UID: \"f5f51319-9413-4553-b21a-6ac18a452855\") " pod="openstack/ovsdbserver-nb-0"
Dec 10 13:11:51 crc kubenswrapper[4921]: I1210 13:11:51.985701 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/f5f51319-9413-4553-b21a-6ac18a452855-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"f5f51319-9413-4553-b21a-6ac18a452855\") " pod="openstack/ovsdbserver-nb-0"
Dec 10 13:11:51 crc kubenswrapper[4921]: I1210 13:11:51.986268 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/f5f51319-9413-4553-b21a-6ac18a452855-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"f5f51319-9413-4553-b21a-6ac18a452855\") " pod="openstack/ovsdbserver-nb-0"
Dec 10 13:11:51 crc kubenswrapper[4921]: I1210 13:11:51.986470 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5f51319-9413-4553-b21a-6ac18a452855-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"f5f51319-9413-4553-b21a-6ac18a452855\") " pod="openstack/ovsdbserver-nb-0"
Dec 10 13:11:51 crc kubenswrapper[4921]: I1210 13:11:51.987476 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f5f51319-9413-4553-b21a-6ac18a452855-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"f5f51319-9413-4553-b21a-6ac18a452855\") " pod="openstack/ovsdbserver-nb-0"
Dec 10 13:11:51 crc kubenswrapper[4921]: I1210 13:11:51.996225 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/f5f51319-9413-4553-b21a-6ac18a452855-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"f5f51319-9413-4553-b21a-6ac18a452855\") " pod="openstack/ovsdbserver-nb-0"
Dec 10 13:11:52 crc kubenswrapper[4921]: I1210 13:11:52.001588 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bnsnk\" (UniqueName: \"kubernetes.io/projected/f5f51319-9413-4553-b21a-6ac18a452855-kube-api-access-bnsnk\") pod \"ovsdbserver-nb-0\" (UID: \"f5f51319-9413-4553-b21a-6ac18a452855\") " pod="openstack/ovsdbserver-nb-0"
Dec 10 13:11:52 crc kubenswrapper[4921]: I1210 13:11:52.095221 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"ovsdbserver-nb-0\" (UID: \"f5f51319-9413-4553-b21a-6ac18a452855\") " pod="openstack/ovsdbserver-nb-0"
Dec 10 13:11:52 crc kubenswrapper[4921]: I1210 13:11:52.152645 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0"
Dec 10 13:11:53 crc kubenswrapper[4921]: I1210 13:11:53.279842 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"]
Dec 10 13:11:53 crc kubenswrapper[4921]: I1210 13:11:53.281817 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0"
Dec 10 13:11:53 crc kubenswrapper[4921]: I1210 13:11:53.284750 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-pbr9h"
Dec 10 13:11:53 crc kubenswrapper[4921]: I1210 13:11:53.290866 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs"
Dec 10 13:11:53 crc kubenswrapper[4921]: I1210 13:11:53.291129 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts"
Dec 10 13:11:53 crc kubenswrapper[4921]: I1210 13:11:53.291341 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config"
Dec 10 13:11:53 crc kubenswrapper[4921]: I1210 13:11:53.306805 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"]
Dec 10 13:11:53 crc kubenswrapper[4921]: I1210 13:11:53.407418 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/49be5ede-571d-40af-b73e-b2f6678df767-config\") pod \"ovsdbserver-sb-0\" (UID: \"49be5ede-571d-40af-b73e-b2f6678df767\") " pod="openstack/ovsdbserver-sb-0"
Dec 10 13:11:53 crc kubenswrapper[4921]: I1210 13:11:53.407552 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/49be5ede-571d-40af-b73e-b2f6678df767-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"49be5ede-571d-40af-b73e-b2f6678df767\") " pod="openstack/ovsdbserver-sb-0"
Dec 10 13:11:53 crc kubenswrapper[4921]: I1210 13:11:53.407628 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/49be5ede-571d-40af-b73e-b2f6678df767-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"49be5ede-571d-40af-b73e-b2f6678df767\") " pod="openstack/ovsdbserver-sb-0"
Dec 10 13:11:53 crc kubenswrapper[4921]: I1210 13:11:53.407670 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zr2fk\" (UniqueName: \"kubernetes.io/projected/49be5ede-571d-40af-b73e-b2f6678df767-kube-api-access-zr2fk\") pod \"ovsdbserver-sb-0\" (UID: \"49be5ede-571d-40af-b73e-b2f6678df767\") " pod="openstack/ovsdbserver-sb-0"
Dec 10 13:11:53 crc kubenswrapper[4921]: I1210 13:11:53.407710 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/49be5ede-571d-40af-b73e-b2f6678df767-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"49be5ede-571d-40af-b73e-b2f6678df767\") " pod="openstack/ovsdbserver-sb-0"
Dec 10 13:11:53 crc kubenswrapper[4921]: I1210 13:11:53.407903 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"ovsdbserver-sb-0\" (UID: \"49be5ede-571d-40af-b73e-b2f6678df767\") " pod="openstack/ovsdbserver-sb-0"
Dec 10 13:11:53 crc kubenswrapper[4921]: I1210 13:11:53.408025 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/49be5ede-571d-40af-b73e-b2f6678df767-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"49be5ede-571d-40af-b73e-b2f6678df767\") " pod="openstack/ovsdbserver-sb-0"
Dec 10 13:11:53 crc kubenswrapper[4921]: I1210 13:11:53.408069 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/49be5ede-571d-40af-b73e-b2f6678df767-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"49be5ede-571d-40af-b73e-b2f6678df767\") " pod="openstack/ovsdbserver-sb-0"
Dec 10 13:11:53 crc kubenswrapper[4921]: I1210 13:11:53.509378 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zr2fk\" (UniqueName: \"kubernetes.io/projected/49be5ede-571d-40af-b73e-b2f6678df767-kube-api-access-zr2fk\") pod \"ovsdbserver-sb-0\" (UID: \"49be5ede-571d-40af-b73e-b2f6678df767\") " pod="openstack/ovsdbserver-sb-0"
Dec 10 13:11:53 crc kubenswrapper[4921]: I1210 13:11:53.509456 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/49be5ede-571d-40af-b73e-b2f6678df767-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"49be5ede-571d-40af-b73e-b2f6678df767\") " pod="openstack/ovsdbserver-sb-0"
Dec 10 13:11:53 crc kubenswrapper[4921]: I1210 13:11:53.509517 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"ovsdbserver-sb-0\" (UID: \"49be5ede-571d-40af-b73e-b2f6678df767\") " pod="openstack/ovsdbserver-sb-0"
Dec 10 13:11:53 crc kubenswrapper[4921]: I1210 13:11:53.509573 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/49be5ede-571d-40af-b73e-b2f6678df767-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"49be5ede-571d-40af-b73e-b2f6678df767\") " pod="openstack/ovsdbserver-sb-0"
Dec 10 13:11:53 crc kubenswrapper[4921]: I1210 13:11:53.509606 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/49be5ede-571d-40af-b73e-b2f6678df767-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"49be5ede-571d-40af-b73e-b2f6678df767\") " pod="openstack/ovsdbserver-sb-0"
Dec 10 13:11:53 crc kubenswrapper[4921]: I1210 13:11:53.509637 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/49be5ede-571d-40af-b73e-b2f6678df767-config\") pod \"ovsdbserver-sb-0\" (UID: \"49be5ede-571d-40af-b73e-b2f6678df767\") " pod="openstack/ovsdbserver-sb-0"
Dec 10 13:11:53 crc kubenswrapper[4921]: I1210 13:11:53.509673 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/49be5ede-571d-40af-b73e-b2f6678df767-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"49be5ede-571d-40af-b73e-b2f6678df767\") " pod="openstack/ovsdbserver-sb-0"
Dec 10 13:11:53 crc kubenswrapper[4921]: I1210 13:11:53.509708 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/49be5ede-571d-40af-b73e-b2f6678df767-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"49be5ede-571d-40af-b73e-b2f6678df767\") " pod="openstack/ovsdbserver-sb-0"
Dec 10 13:11:53 crc kubenswrapper[4921]: I1210 13:11:53.510817 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/49be5ede-571d-40af-b73e-b2f6678df767-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"49be5ede-571d-40af-b73e-b2f6678df767\") " pod="openstack/ovsdbserver-sb-0"
Dec 10 13:11:53 crc kubenswrapper[4921]: I1210 13:11:53.511027 4921 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"ovsdbserver-sb-0\" (UID: \"49be5ede-571d-40af-b73e-b2f6678df767\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/ovsdbserver-sb-0"
Dec 10 13:11:53 crc kubenswrapper[4921]: I1210 13:11:53.511065 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/49be5ede-571d-40af-b73e-b2f6678df767-config\") pod \"ovsdbserver-sb-0\" (UID: \"49be5ede-571d-40af-b73e-b2f6678df767\") " pod="openstack/ovsdbserver-sb-0"
Dec 10 13:11:53 crc kubenswrapper[4921]: I1210 13:11:53.512280 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/49be5ede-571d-40af-b73e-b2f6678df767-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"49be5ede-571d-40af-b73e-b2f6678df767\") " pod="openstack/ovsdbserver-sb-0"
Dec 10 13:11:53 crc kubenswrapper[4921]: I1210 13:11:53.514328 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/49be5ede-571d-40af-b73e-b2f6678df767-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"49be5ede-571d-40af-b73e-b2f6678df767\") " pod="openstack/ovsdbserver-sb-0"
Dec 10 13:11:53 crc kubenswrapper[4921]: I1210 13:11:53.515071 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/49be5ede-571d-40af-b73e-b2f6678df767-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"49be5ede-571d-40af-b73e-b2f6678df767\") " pod="openstack/ovsdbserver-sb-0"
Dec 10 13:11:53 crc kubenswrapper[4921]: I1210 13:11:53.523258 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/49be5ede-571d-40af-b73e-b2f6678df767-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"49be5ede-571d-40af-b73e-b2f6678df767\") " pod="openstack/ovsdbserver-sb-0"
Dec 10 13:11:53 crc kubenswrapper[4921]: I1210 13:11:53.527159 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zr2fk\" (UniqueName: \"kubernetes.io/projected/49be5ede-571d-40af-b73e-b2f6678df767-kube-api-access-zr2fk\") pod \"ovsdbserver-sb-0\" (UID: \"49be5ede-571d-40af-b73e-b2f6678df767\") " pod="openstack/ovsdbserver-sb-0"
Dec 10 13:11:53 crc kubenswrapper[4921]: I1210 13:11:53.539495 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"ovsdbserver-sb-0\" (UID: \"49be5ede-571d-40af-b73e-b2f6678df767\") " pod="openstack/ovsdbserver-sb-0"
Dec 10 13:11:53 crc kubenswrapper[4921]: I1210 13:11:53.612930 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0"
Dec 10 13:11:55 crc kubenswrapper[4921]: E1210 13:11:55.706553 4921 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ae9f3da94b22f54f831da4db16a59c2bdcd8e97421f73e78041cfb48aef3afe7 is running failed: container process not found" containerID="ae9f3da94b22f54f831da4db16a59c2bdcd8e97421f73e78041cfb48aef3afe7" cmd=["grpc_health_probe","-addr=:50051"]
Dec 10 13:11:55 crc kubenswrapper[4921]: E1210 13:11:55.708338 4921 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ae9f3da94b22f54f831da4db16a59c2bdcd8e97421f73e78041cfb48aef3afe7 is running failed: container process not found" containerID="ae9f3da94b22f54f831da4db16a59c2bdcd8e97421f73e78041cfb48aef3afe7" cmd=["grpc_health_probe","-addr=:50051"]
Dec 10 13:11:55 crc kubenswrapper[4921]: E1210 13:11:55.708704 4921 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ae9f3da94b22f54f831da4db16a59c2bdcd8e97421f73e78041cfb48aef3afe7 is running failed: container process not found" containerID="ae9f3da94b22f54f831da4db16a59c2bdcd8e97421f73e78041cfb48aef3afe7" cmd=["grpc_health_probe","-addr=:50051"]
Dec 10 13:11:55 crc kubenswrapper[4921]: E1210 13:11:55.708777 4921 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ae9f3da94b22f54f831da4db16a59c2bdcd8e97421f73e78041cfb48aef3afe7 is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/certified-operators-82dmq" podUID="3038fce5-a66d-441a-b4af-00e5afb26384" containerName="registry-server"
Dec 10 13:12:03 crc kubenswrapper[4921]: W1210 13:12:03.307879 4921 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8444cda9_1de8_49a1_81b3_ebf79bae9995.slice/crio-94a92529f05afdc8e57e122d1685ac23bccef03fb368c473890896242aa36f41 WatchSource:0}: Error finding container 94a92529f05afdc8e57e122d1685ac23bccef03fb368c473890896242aa36f41: Status 404 returned error can't find the container with id 94a92529f05afdc8e57e122d1685ac23bccef03fb368c473890896242aa36f41
Dec 10 13:12:03 crc kubenswrapper[4921]: I1210 13:12:03.386808 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-82dmq"
Dec 10 13:12:03 crc kubenswrapper[4921]: I1210 13:12:03.578176 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3038fce5-a66d-441a-b4af-00e5afb26384-catalog-content\") pod \"3038fce5-a66d-441a-b4af-00e5afb26384\" (UID: \"3038fce5-a66d-441a-b4af-00e5afb26384\") "
Dec 10 13:12:03 crc kubenswrapper[4921]: I1210 13:12:03.578307 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3038fce5-a66d-441a-b4af-00e5afb26384-utilities\") pod \"3038fce5-a66d-441a-b4af-00e5afb26384\" (UID: \"3038fce5-a66d-441a-b4af-00e5afb26384\") "
Dec 10 13:12:03 crc kubenswrapper[4921]: I1210 13:12:03.578489 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s9nb6\" (UniqueName: \"kubernetes.io/projected/3038fce5-a66d-441a-b4af-00e5afb26384-kube-api-access-s9nb6\") pod \"3038fce5-a66d-441a-b4af-00e5afb26384\" (UID: \"3038fce5-a66d-441a-b4af-00e5afb26384\") "
Dec 10 13:12:03 crc kubenswrapper[4921]: I1210 13:12:03.579200 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3038fce5-a66d-441a-b4af-00e5afb26384-utilities" (OuterVolumeSpecName: "utilities") pod "3038fce5-a66d-441a-b4af-00e5afb26384" (UID: "3038fce5-a66d-441a-b4af-00e5afb26384"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 10 13:12:03 crc kubenswrapper[4921]: I1210 13:12:03.585107 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3038fce5-a66d-441a-b4af-00e5afb26384-kube-api-access-s9nb6" (OuterVolumeSpecName: "kube-api-access-s9nb6") pod "3038fce5-a66d-441a-b4af-00e5afb26384" (UID: "3038fce5-a66d-441a-b4af-00e5afb26384"). InnerVolumeSpecName "kube-api-access-s9nb6". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 10 13:12:03 crc kubenswrapper[4921]: I1210 13:12:03.586967 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-zv79d" event={"ID":"8444cda9-1de8-49a1-81b3-ebf79bae9995","Type":"ContainerStarted","Data":"94a92529f05afdc8e57e122d1685ac23bccef03fb368c473890896242aa36f41"}
Dec 10 13:12:03 crc kubenswrapper[4921]: I1210 13:12:03.590481 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-82dmq" event={"ID":"3038fce5-a66d-441a-b4af-00e5afb26384","Type":"ContainerDied","Data":"f6b6c553c96dc2750eee8fdd26cdd46c0d4b1e6e8a1f1fc76dc470b98169a2eb"}
Dec 10 13:12:03 crc kubenswrapper[4921]: I1210 13:12:03.590514 4921 scope.go:117] "RemoveContainer" containerID="ae9f3da94b22f54f831da4db16a59c2bdcd8e97421f73e78041cfb48aef3afe7"
Dec 10 13:12:03 crc kubenswrapper[4921]: I1210 13:12:03.590547 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-82dmq"
Dec 10 13:12:03 crc kubenswrapper[4921]: I1210 13:12:03.631600 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3038fce5-a66d-441a-b4af-00e5afb26384-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3038fce5-a66d-441a-b4af-00e5afb26384" (UID: "3038fce5-a66d-441a-b4af-00e5afb26384"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 10 13:12:03 crc kubenswrapper[4921]: I1210 13:12:03.679701 4921 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3038fce5-a66d-441a-b4af-00e5afb26384-utilities\") on node \"crc\" DevicePath \"\""
Dec 10 13:12:03 crc kubenswrapper[4921]: I1210 13:12:03.679730 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s9nb6\" (UniqueName: \"kubernetes.io/projected/3038fce5-a66d-441a-b4af-00e5afb26384-kube-api-access-s9nb6\") on node \"crc\" DevicePath \"\""
Dec 10 13:12:03 crc kubenswrapper[4921]: I1210 13:12:03.679743 4921 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3038fce5-a66d-441a-b4af-00e5afb26384-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 10 13:12:03 crc kubenswrapper[4921]: I1210 13:12:03.799259 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-75zw8"]
Dec 10 13:12:03 crc kubenswrapper[4921]: I1210 13:12:03.940155 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-82dmq"]
Dec 10 13:12:03 crc kubenswrapper[4921]: I1210 13:12:03.951906 4921 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-82dmq"]
Dec 10 13:12:04 crc kubenswrapper[4921]: I1210 13:12:04.599802 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-75zw8" event={"ID":"be165009-1ecf-4849-8cff-e83071094e81","Type":"ContainerStarted","Data":"81e5ebd189d863604ce0343f9eebd15258ed406d9c03fe009d5f47bd5637ae28"}
Dec 10 13:12:04 crc kubenswrapper[4921]: E1210 13:12:04.922368 4921 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-memcached:current-podified"
Dec 10 13:12:04 crc kubenswrapper[4921]: E1210 13:12:04.922639 4921 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:memcached,Image:quay.io/podified-antelope-centos9/openstack-memcached:current-podified,Command:[/usr/bin/dumb-init --
/usr/local/bin/kolla_start],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:memcached,HostPort:0,ContainerPort:11211,Protocol:TCP,HostIP:,},ContainerPort{Name:memcached-tls,HostPort:0,ContainerPort:11212,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:POD_IPS,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIPs,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:CONFIG_HASH,Value:n5d6h579h588h88h647h579h54h549h567h67bh5c6h99h557h86h598h55hd6h68bh76h586h86h576h5c9h59fh586h686h9dh5d9h7dh689h7h7fq,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/src,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kolla-config,ReadOnly:true,MountPath:/var/lib/kolla/config_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:memcached-tls-certs,ReadOnly:true,MountPath:/var/lib/config-data/tls/certs/memcached.crt,SubPath:tls.crt,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:memcached-tls-certs,ReadOnly:true,MountPath:/var/lib/config-data/tls/private/memcached.key,SubPath:tls.key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-lkpzq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 11211 },Host:,},GRPC:nil,},InitialDelaySeconds:3,TimeoutSeconds:5,PeriodSeconds:3,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 11211 },Host:,},GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42457,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42457,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod memcached-0_openstack(c0fbcc23-a641-4787-9edc-7bf8e8e46a79): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 10 13:12:04 crc kubenswrapper[4921]: E1210 13:12:04.923868 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"memcached\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/memcached-0" podUID="c0fbcc23-a641-4787-9edc-7bf8e8e46a79" Dec 10 13:12:05 crc kubenswrapper[4921]: I1210 13:12:05.205360 4921 
kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3038fce5-a66d-441a-b4af-00e5afb26384" path="/var/lib/kubelet/pods/3038fce5-a66d-441a-b4af-00e5afb26384/volumes" Dec 10 13:12:05 crc kubenswrapper[4921]: E1210 13:12:05.609642 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"memcached\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-memcached:current-podified\\\"\"" pod="openstack/memcached-0" podUID="c0fbcc23-a641-4787-9edc-7bf8e8e46a79" Dec 10 13:12:14 crc kubenswrapper[4921]: E1210 13:12:14.460104 4921 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified" Dec 10 13:12:14 crc kubenswrapper[4921]: E1210 13:12:14.460729 4921 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:setup-container,Image:quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified,Command:[sh -c cp /tmp/erlang-cookie-secret/.erlang.cookie /var/lib/rabbitmq/.erlang.cookie && chmod 600 /var/lib/rabbitmq/.erlang.cookie ; cp /tmp/rabbitmq-plugins/enabled_plugins /operator/enabled_plugins ; echo '[default]' > /var/lib/rabbitmq/.rabbitmqadmin.conf && sed -e 's/default_user/username/' -e 's/default_pass/password/' /tmp/default_user.conf >> /var/lib/rabbitmq/.rabbitmqadmin.conf && chmod 600 /var/lib/rabbitmq/.rabbitmqadmin.conf ; sleep 30],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Requests:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:plugins-conf,ReadOnly:false,MountPath:/tmp/rabbitmq-plugins/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-erlang-cookie,ReadOnly:false,MountPath:/var/lib/rabbitmq/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:erlang-cookie-secret,ReadOnly:false,MountPath:/tmp/erlang-cookie-secret/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-plugins,ReadOnly:false,MountPath:/operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:persistence,ReadOnly:false,MountPath:/var/lib/rabbitmq/mnesia/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-confd,ReadOnly:false,MountPath:/tmp/default_user.conf,SubPath:default_user.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-jvw6t,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-server-0_openstack(c651083f-4dd3-4963-892f-ddbc5ef1af05): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 10 13:12:14 crc kubenswrapper[4921]: E1210 13:12:14.462266 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/rabbitmq-server-0" podUID="c651083f-4dd3-4963-892f-ddbc5ef1af05" Dec 10 13:12:14 crc kubenswrapper[4921]: I1210 13:12:14.487469 4921 scope.go:117] "RemoveContainer" containerID="bdee4a0444c164ee43f2958715878e803bf25b9dd10b96af4fffa826d5742e9d" Dec 10 13:12:14 crc kubenswrapper[4921]: E1210 13:12:14.524253 4921 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified" Dec 10 13:12:14 crc kubenswrapper[4921]: E1210 13:12:14.524462 4921 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:setup-container,Image:quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified,Command:[sh -c cp /tmp/erlang-cookie-secret/.erlang.cookie /var/lib/rabbitmq/.erlang.cookie && chmod 600 /var/lib/rabbitmq/.erlang.cookie ; cp /tmp/rabbitmq-plugins/enabled_plugins /operator/enabled_plugins ; echo '[default]' > /var/lib/rabbitmq/.rabbitmqadmin.conf && sed -e 's/default_user/username/' -e 's/default_pass/password/' /tmp/default_user.conf >> /var/lib/rabbitmq/.rabbitmqadmin.conf && chmod 600 /var/lib/rabbitmq/.rabbitmqadmin.conf ; sleep 
30],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Requests:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:plugins-conf,ReadOnly:false,MountPath:/tmp/rabbitmq-plugins/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-erlang-cookie,ReadOnly:false,MountPath:/var/lib/rabbitmq/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:erlang-cookie-secret,ReadOnly:false,MountPath:/tmp/erlang-cookie-secret/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-plugins,ReadOnly:false,MountPath:/operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:persistence,ReadOnly:false,MountPath:/var/lib/rabbitmq/mnesia/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-confd,ReadOnly:false,MountPath:/tmp/default_user.conf,SubPath:default_user.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-zk5nq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cell1-server-0_openstack(e098cd5a-992f-42a0-a89e-d8dd59dbbcc5): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 10 13:12:14 crc kubenswrapper[4921]: E1210 13:12:14.525683 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/rabbitmq-cell1-server-0" podUID="e098cd5a-992f-42a0-a89e-d8dd59dbbcc5" Dec 10 13:12:14 crc kubenswrapper[4921]: E1210 13:12:14.674126 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified\\\"\"" pod="openstack/rabbitmq-server-0" podUID="c651083f-4dd3-4963-892f-ddbc5ef1af05" Dec 10 13:12:14 crc kubenswrapper[4921]: E1210 13:12:14.674584 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified\\\"\"" pod="openstack/rabbitmq-cell1-server-0" podUID="e098cd5a-992f-42a0-a89e-d8dd59dbbcc5" Dec 10 13:12:16 crc kubenswrapper[4921]: I1210 13:12:16.710480 4921 patch_prober.go:28] interesting pod/machine-config-daemon-vn2n6 container/machine-config-daemon 
namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 10 13:12:16 crc kubenswrapper[4921]: I1210 13:12:16.710538 4921 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" podUID="354355f7-6630-49a8-bdc5-5e875feecb7f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 10 13:12:16 crc kubenswrapper[4921]: I1210 13:12:16.710581 4921 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" Dec 10 13:12:16 crc kubenswrapper[4921]: I1210 13:12:16.711229 4921 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"8d11980c56c7b436b6d741535cc469b576e206b3de67a362d6c36f2e03055365"} pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 10 13:12:16 crc kubenswrapper[4921]: I1210 13:12:16.711290 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" podUID="354355f7-6630-49a8-bdc5-5e875feecb7f" containerName="machine-config-daemon" containerID="cri-o://8d11980c56c7b436b6d741535cc469b576e206b3de67a362d6c36f2e03055365" gracePeriod=600 Dec 10 13:12:17 crc kubenswrapper[4921]: I1210 13:12:17.537432 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-67sv6"] Dec 10 13:12:17 crc kubenswrapper[4921]: E1210 13:12:17.538064 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3038fce5-a66d-441a-b4af-00e5afb26384" containerName="extract-utilities" Dec 10 13:12:17 crc kubenswrapper[4921]: I1210 13:12:17.538083 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="3038fce5-a66d-441a-b4af-00e5afb26384" containerName="extract-utilities" Dec 10 13:12:17 crc kubenswrapper[4921]: E1210 13:12:17.540828 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3038fce5-a66d-441a-b4af-00e5afb26384" containerName="registry-server" Dec 10 13:12:17 crc kubenswrapper[4921]: I1210 13:12:17.540853 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="3038fce5-a66d-441a-b4af-00e5afb26384" containerName="registry-server" Dec 10 13:12:17 crc kubenswrapper[4921]: E1210 13:12:17.540871 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3038fce5-a66d-441a-b4af-00e5afb26384" containerName="extract-content" Dec 10 13:12:17 crc kubenswrapper[4921]: I1210 13:12:17.540877 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="3038fce5-a66d-441a-b4af-00e5afb26384" containerName="extract-content" Dec 10 13:12:17 crc kubenswrapper[4921]: I1210 13:12:17.541157 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="3038fce5-a66d-441a-b4af-00e5afb26384" containerName="registry-server" Dec 10 13:12:17 crc kubenswrapper[4921]: I1210 13:12:17.542328 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-67sv6" Dec 10 13:12:17 crc kubenswrapper[4921]: I1210 13:12:17.548478 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-67sv6"] Dec 10 13:12:17 crc kubenswrapper[4921]: I1210 13:12:17.696102 4921 generic.go:334] "Generic (PLEG): container finished" podID="354355f7-6630-49a8-bdc5-5e875feecb7f" containerID="8d11980c56c7b436b6d741535cc469b576e206b3de67a362d6c36f2e03055365" exitCode=0 Dec 10 13:12:17 crc kubenswrapper[4921]: I1210 13:12:17.696166 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" event={"ID":"354355f7-6630-49a8-bdc5-5e875feecb7f","Type":"ContainerDied","Data":"8d11980c56c7b436b6d741535cc469b576e206b3de67a362d6c36f2e03055365"} Dec 10 13:12:17 crc kubenswrapper[4921]: I1210 13:12:17.714248 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ecde3911-ea57-498b-a373-8389ccc0696c-utilities\") pod \"redhat-marketplace-67sv6\" (UID: \"ecde3911-ea57-498b-a373-8389ccc0696c\") " pod="openshift-marketplace/redhat-marketplace-67sv6" Dec 10 13:12:17 crc kubenswrapper[4921]: I1210 13:12:17.714326 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ecde3911-ea57-498b-a373-8389ccc0696c-catalog-content\") pod \"redhat-marketplace-67sv6\" (UID: \"ecde3911-ea57-498b-a373-8389ccc0696c\") " pod="openshift-marketplace/redhat-marketplace-67sv6" Dec 10 13:12:17 crc kubenswrapper[4921]: I1210 13:12:17.714540 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zsgwh\" (UniqueName: \"kubernetes.io/projected/ecde3911-ea57-498b-a373-8389ccc0696c-kube-api-access-zsgwh\") pod \"redhat-marketplace-67sv6\" (UID: \"ecde3911-ea57-498b-a373-8389ccc0696c\") " pod="openshift-marketplace/redhat-marketplace-67sv6" Dec 10 13:12:17 crc kubenswrapper[4921]: I1210 13:12:17.816698 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ecde3911-ea57-498b-a373-8389ccc0696c-utilities\") pod \"redhat-marketplace-67sv6\" (UID: \"ecde3911-ea57-498b-a373-8389ccc0696c\") " pod="openshift-marketplace/redhat-marketplace-67sv6" Dec 10 13:12:17 crc kubenswrapper[4921]: I1210 13:12:17.816762 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ecde3911-ea57-498b-a373-8389ccc0696c-catalog-content\") pod \"redhat-marketplace-67sv6\" (UID: \"ecde3911-ea57-498b-a373-8389ccc0696c\") " pod="openshift-marketplace/redhat-marketplace-67sv6" Dec 10 13:12:17 crc kubenswrapper[4921]: I1210 13:12:17.816804 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zsgwh\" (UniqueName: \"kubernetes.io/projected/ecde3911-ea57-498b-a373-8389ccc0696c-kube-api-access-zsgwh\") pod \"redhat-marketplace-67sv6\" (UID: \"ecde3911-ea57-498b-a373-8389ccc0696c\") " pod="openshift-marketplace/redhat-marketplace-67sv6" Dec 10 13:12:17 crc kubenswrapper[4921]: I1210 13:12:17.817264 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ecde3911-ea57-498b-a373-8389ccc0696c-utilities\") pod \"redhat-marketplace-67sv6\" (UID: 
\"ecde3911-ea57-498b-a373-8389ccc0696c\") " pod="openshift-marketplace/redhat-marketplace-67sv6" Dec 10 13:12:17 crc kubenswrapper[4921]: I1210 13:12:17.817463 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ecde3911-ea57-498b-a373-8389ccc0696c-catalog-content\") pod \"redhat-marketplace-67sv6\" (UID: \"ecde3911-ea57-498b-a373-8389ccc0696c\") " pod="openshift-marketplace/redhat-marketplace-67sv6" Dec 10 13:12:17 crc kubenswrapper[4921]: I1210 13:12:17.837936 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zsgwh\" (UniqueName: \"kubernetes.io/projected/ecde3911-ea57-498b-a373-8389ccc0696c-kube-api-access-zsgwh\") pod \"redhat-marketplace-67sv6\" (UID: \"ecde3911-ea57-498b-a373-8389ccc0696c\") " pod="openshift-marketplace/redhat-marketplace-67sv6" Dec 10 13:12:17 crc kubenswrapper[4921]: I1210 13:12:17.870664 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-67sv6" Dec 10 13:12:23 crc kubenswrapper[4921]: E1210 13:12:23.034332 4921 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Dec 10 13:12:23 crc kubenswrapper[4921]: E1210 13:12:23.035535 4921 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nfdh5dfhb6h64h676hc4h78h97h669h54chfbh696hb5h54bh5d4h6bh64h644h677h584h5cbh698h9dh5bbh5f8h5b8hcdh644h5c7h694hbfh589q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-nkvrx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-5ccc8479f9-pfmfg_openstack(29ac1ff7-0ecb-4414-be96-ee937ba5bbb7): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 10 13:12:23 crc kubenswrapper[4921]: E1210 13:12:23.036748 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-5ccc8479f9-pfmfg" podUID="29ac1ff7-0ecb-4414-be96-ee937ba5bbb7" Dec 10 13:12:23 crc kubenswrapper[4921]: E1210 13:12:23.340740 4921 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Dec 10 13:12:23 crc kubenswrapper[4921]: E1210 13:12:23.341494 4921 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nffh5bdhf4h5f8h79h55h77h58fh56dh7bh6fh578hbch55dh68h56bhd9h65dh57ch658hc9h566h666h688h58h65dh684h5d7h6ch575h5d6h88q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-kft8n,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-675f4bcbfc-nn5zn_openstack(2fe91281-0f3f-4e50-9701-5d237060f645): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 10 13:12:23 crc kubenswrapper[4921]: E1210 13:12:23.344616 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-675f4bcbfc-nn5zn" podUID="2fe91281-0f3f-4e50-9701-5d237060f645" Dec 10 13:12:23 crc kubenswrapper[4921]: E1210 13:12:23.416069 4921 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Dec 10 13:12:23 crc kubenswrapper[4921]: E1210 13:12:23.416295 4921 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:ndfhb5h667h568h584h5f9h58dh565h664h587h597h577h64bh5c4h66fh647hbdh68ch5c5h68dh686h5f7h64hd7hc6h55fh57bh98h57fh87h5fh57fq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-vd7s5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-78dd6ddcc-rswl8_openstack(6d77f2dd-23bb-44c4-9f82-ef2587a0d872): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 10 13:12:23 crc kubenswrapper[4921]: E1210 13:12:23.417542 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-78dd6ddcc-rswl8" podUID="6d77f2dd-23bb-44c4-9f82-ef2587a0d872" Dec 10 13:12:23 crc kubenswrapper[4921]: E1210 13:12:23.423846 4921 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Dec 10 13:12:23 crc kubenswrapper[4921]: E1210 13:12:23.423996 4921 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n659h4h664hbh658h587h67ch89h587h8fh679hc6hf9h55fh644h5d5h698h68dh5cdh5ffh669h54ch9h689hb8hd4h5bfhd8h5d7h5fh665h574q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-pd2pj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-57d769cc4f-42vg9_openstack(cff820e5-df50-4ec5-bd20-4320a33badf1): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 10 13:12:23 crc kubenswrapper[4921]: E1210 13:12:23.425534 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-57d769cc4f-42vg9" podUID="cff820e5-df50-4ec5-bd20-4320a33badf1" Dec 10 13:12:23 crc kubenswrapper[4921]: I1210 13:12:23.618602 4921 scope.go:117] "RemoveContainer" containerID="30f010e0c32781f1d4f3e9d3e0c6705fa988eb8d24678482b4f9ab9cfcbe3241" Dec 10 13:12:23 crc kubenswrapper[4921]: E1210 13:12:23.775961 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified\\\"\"" pod="openstack/dnsmasq-dns-57d769cc4f-42vg9" podUID="cff820e5-df50-4ec5-bd20-4320a33badf1" Dec 10 13:12:23 crc kubenswrapper[4921]: E1210 13:12:23.776038 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified\\\"\"" pod="openstack/dnsmasq-dns-5ccc8479f9-pfmfg" podUID="29ac1ff7-0ecb-4414-be96-ee937ba5bbb7" Dec 10 13:12:24 crc kubenswrapper[4921]: I1210 13:12:24.211792 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-f9dft"] Dec 10 13:12:24 crc kubenswrapper[4921]: I1210 13:12:24.252317 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 10 13:12:24 crc 
kubenswrapper[4921]: W1210 13:12:24.968360 4921 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod49be5ede_571d_40af_b73e_b2f6678df767.slice/crio-1e6fd91fc602534312dbfe771c117503fd7a47c576aadc2a603f492ede9a894e WatchSource:0}: Error finding container 1e6fd91fc602534312dbfe771c117503fd7a47c576aadc2a603f492ede9a894e: Status 404 returned error can't find the container with id 1e6fd91fc602534312dbfe771c117503fd7a47c576aadc2a603f492ede9a894e Dec 10 13:12:25 crc kubenswrapper[4921]: I1210 13:12:25.040576 4921 scope.go:117] "RemoveContainer" containerID="14289058d0fb041d586a9216e5a19d1f702167fc4a4034c67755ae206f4ba1ec" Dec 10 13:12:25 crc kubenswrapper[4921]: I1210 13:12:25.053957 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-rswl8" Dec 10 13:12:25 crc kubenswrapper[4921]: I1210 13:12:25.058777 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-nn5zn" Dec 10 13:12:25 crc kubenswrapper[4921]: I1210 13:12:25.147730 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6d77f2dd-23bb-44c4-9f82-ef2587a0d872-config\") pod \"6d77f2dd-23bb-44c4-9f82-ef2587a0d872\" (UID: \"6d77f2dd-23bb-44c4-9f82-ef2587a0d872\") " Dec 10 13:12:25 crc kubenswrapper[4921]: I1210 13:12:25.148155 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vd7s5\" (UniqueName: \"kubernetes.io/projected/6d77f2dd-23bb-44c4-9f82-ef2587a0d872-kube-api-access-vd7s5\") pod \"6d77f2dd-23bb-44c4-9f82-ef2587a0d872\" (UID: \"6d77f2dd-23bb-44c4-9f82-ef2587a0d872\") " Dec 10 13:12:25 crc kubenswrapper[4921]: I1210 13:12:25.148262 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6d77f2dd-23bb-44c4-9f82-ef2587a0d872-dns-svc\") pod \"6d77f2dd-23bb-44c4-9f82-ef2587a0d872\" (UID: \"6d77f2dd-23bb-44c4-9f82-ef2587a0d872\") " Dec 10 13:12:25 crc kubenswrapper[4921]: I1210 13:12:25.148490 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6d77f2dd-23bb-44c4-9f82-ef2587a0d872-config" (OuterVolumeSpecName: "config") pod "6d77f2dd-23bb-44c4-9f82-ef2587a0d872" (UID: "6d77f2dd-23bb-44c4-9f82-ef2587a0d872"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 13:12:25 crc kubenswrapper[4921]: I1210 13:12:25.148747 4921 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6d77f2dd-23bb-44c4-9f82-ef2587a0d872-config\") on node \"crc\" DevicePath \"\"" Dec 10 13:12:25 crc kubenswrapper[4921]: I1210 13:12:25.148810 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6d77f2dd-23bb-44c4-9f82-ef2587a0d872-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "6d77f2dd-23bb-44c4-9f82-ef2587a0d872" (UID: "6d77f2dd-23bb-44c4-9f82-ef2587a0d872"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 13:12:25 crc kubenswrapper[4921]: I1210 13:12:25.162797 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6d77f2dd-23bb-44c4-9f82-ef2587a0d872-kube-api-access-vd7s5" (OuterVolumeSpecName: "kube-api-access-vd7s5") pod "6d77f2dd-23bb-44c4-9f82-ef2587a0d872" (UID: "6d77f2dd-23bb-44c4-9f82-ef2587a0d872"). InnerVolumeSpecName "kube-api-access-vd7s5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 13:12:25 crc kubenswrapper[4921]: I1210 13:12:25.188650 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 10 13:12:25 crc kubenswrapper[4921]: I1210 13:12:25.250165 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2fe91281-0f3f-4e50-9701-5d237060f645-config\") pod \"2fe91281-0f3f-4e50-9701-5d237060f645\" (UID: \"2fe91281-0f3f-4e50-9701-5d237060f645\") " Dec 10 13:12:25 crc kubenswrapper[4921]: I1210 13:12:25.250768 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2fe91281-0f3f-4e50-9701-5d237060f645-config" (OuterVolumeSpecName: "config") pod "2fe91281-0f3f-4e50-9701-5d237060f645" (UID: "2fe91281-0f3f-4e50-9701-5d237060f645"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 13:12:25 crc kubenswrapper[4921]: I1210 13:12:25.251078 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kft8n\" (UniqueName: \"kubernetes.io/projected/2fe91281-0f3f-4e50-9701-5d237060f645-kube-api-access-kft8n\") pod \"2fe91281-0f3f-4e50-9701-5d237060f645\" (UID: \"2fe91281-0f3f-4e50-9701-5d237060f645\") " Dec 10 13:12:25 crc kubenswrapper[4921]: I1210 13:12:25.251465 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vd7s5\" (UniqueName: \"kubernetes.io/projected/6d77f2dd-23bb-44c4-9f82-ef2587a0d872-kube-api-access-vd7s5\") on node \"crc\" DevicePath \"\"" Dec 10 13:12:25 crc kubenswrapper[4921]: I1210 13:12:25.251480 4921 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6d77f2dd-23bb-44c4-9f82-ef2587a0d872-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 10 13:12:25 crc kubenswrapper[4921]: I1210 13:12:25.251491 4921 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2fe91281-0f3f-4e50-9701-5d237060f645-config\") on node \"crc\" DevicePath \"\"" Dec 10 13:12:25 crc kubenswrapper[4921]: I1210 13:12:25.254249 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2fe91281-0f3f-4e50-9701-5d237060f645-kube-api-access-kft8n" (OuterVolumeSpecName: "kube-api-access-kft8n") pod "2fe91281-0f3f-4e50-9701-5d237060f645" (UID: "2fe91281-0f3f-4e50-9701-5d237060f645"). InnerVolumeSpecName "kube-api-access-kft8n". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 13:12:25 crc kubenswrapper[4921]: I1210 13:12:25.353811 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kft8n\" (UniqueName: \"kubernetes.io/projected/2fe91281-0f3f-4e50-9701-5d237060f645-kube-api-access-kft8n\") on node \"crc\" DevicePath \"\"" Dec 10 13:12:25 crc kubenswrapper[4921]: W1210 13:12:25.459123 4921 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf5f51319_9413_4553_b21a_6ac18a452855.slice/crio-d6c58e81d28abdf523c75372724a4c85bc31ce6b13599e9319c5731bb8dc971a WatchSource:0}: Error finding container d6c58e81d28abdf523c75372724a4c85bc31ce6b13599e9319c5731bb8dc971a: Status 404 returned error can't find the container with id d6c58e81d28abdf523c75372724a4c85bc31ce6b13599e9319c5731bb8dc971a Dec 10 13:12:25 crc kubenswrapper[4921]: I1210 13:12:25.485794 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-67sv6"] Dec 10 13:12:25 crc kubenswrapper[4921]: I1210 13:12:25.786783 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-nn5zn" Dec 10 13:12:25 crc kubenswrapper[4921]: I1210 13:12:25.786776 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-nn5zn" event={"ID":"2fe91281-0f3f-4e50-9701-5d237060f645","Type":"ContainerDied","Data":"1801e11e04aecd9496637a20446ccbe107090ad375b455da8ee8f1bb8ffa854f"} Dec 10 13:12:25 crc kubenswrapper[4921]: I1210 13:12:25.789651 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-f9dft" event={"ID":"039043c2-b38e-4a56-a3ce-45c9462ed0f1","Type":"ContainerStarted","Data":"63f7b9462d7d05d170f6dc19fde16d862b20a84215bcc4c1e6711c279a4a2267"} Dec 10 13:12:25 crc kubenswrapper[4921]: I1210 13:12:25.793821 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"f5f51319-9413-4553-b21a-6ac18a452855","Type":"ContainerStarted","Data":"d6c58e81d28abdf523c75372724a4c85bc31ce6b13599e9319c5731bb8dc971a"} Dec 10 13:12:25 crc kubenswrapper[4921]: I1210 13:12:25.794983 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"49be5ede-571d-40af-b73e-b2f6678df767","Type":"ContainerStarted","Data":"1e6fd91fc602534312dbfe771c117503fd7a47c576aadc2a603f492ede9a894e"} Dec 10 13:12:25 crc kubenswrapper[4921]: I1210 13:12:25.796108 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-rswl8" event={"ID":"6d77f2dd-23bb-44c4-9f82-ef2587a0d872","Type":"ContainerDied","Data":"f1ae8a1ffb77ac7dd040b7d439a958eec77baab8280d65de06a721194d1d8d3d"} Dec 10 13:12:25 crc kubenswrapper[4921]: I1210 13:12:25.796156 4921 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-rswl8" Dec 10 13:12:25 crc kubenswrapper[4921]: I1210 13:12:25.860351 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-rswl8"] Dec 10 13:12:25 crc kubenswrapper[4921]: I1210 13:12:25.897303 4921 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-rswl8"] Dec 10 13:12:25 crc kubenswrapper[4921]: I1210 13:12:25.914958 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-nn5zn"] Dec 10 13:12:25 crc kubenswrapper[4921]: I1210 13:12:25.920121 4921 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-nn5zn"] Dec 10 13:12:25 crc kubenswrapper[4921]: E1210 13:12:25.995303 4921 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2fe91281_0f3f_4e50_9701_5d237060f645.slice/crio-1801e11e04aecd9496637a20446ccbe107090ad375b455da8ee8f1bb8ffa854f\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2fe91281_0f3f_4e50_9701_5d237060f645.slice\": RecentStats: unable to find data in memory cache]" Dec 10 13:12:26 crc kubenswrapper[4921]: E1210 13:12:26.111298 4921 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: reading blob sha256:9aee425378d2c16cd44177dc54a274b312897f5860a8e78fdfda555a0d79dd71: Get \"https://registry.k8s.io/v2/kube-state-metrics/kube-state-metrics/blobs/sha256:9aee425378d2c16cd44177dc54a274b312897f5860a8e78fdfda555a0d79dd71\": context canceled" image="registry.k8s.io/kube-state-metrics/kube-state-metrics:v2.15.0" Dec 10 13:12:26 crc kubenswrapper[4921]: E1210 13:12:26.111990 4921 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying system image from manifest list: reading blob sha256:9aee425378d2c16cd44177dc54a274b312897f5860a8e78fdfda555a0d79dd71: Get \"https://registry.k8s.io/v2/kube-state-metrics/kube-state-metrics/blobs/sha256:9aee425378d2c16cd44177dc54a274b312897f5860a8e78fdfda555a0d79dd71\": context canceled" image="registry.k8s.io/kube-state-metrics/kube-state-metrics:v2.15.0" Dec 10 13:12:26 crc kubenswrapper[4921]: E1210 13:12:26.112234 4921 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-state-metrics,Image:registry.k8s.io/kube-state-metrics/kube-state-metrics:v2.15.0,Command:[],Args:[--resources=pods --namespaces=openstack],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:http-metrics,HostPort:0,ContainerPort:8080,Protocol:TCP,HostIP:,},ContainerPort{Name:telemetry,HostPort:0,ContainerPort:8081,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-wxmg9,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/livez,Port:{0 8080 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:*true,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod kube-state-metrics-0_openstack(4f843ca5-c8e0-4c44-a626-3cb41c83bab3): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: reading blob sha256:9aee425378d2c16cd44177dc54a274b312897f5860a8e78fdfda555a0d79dd71: Get \"https://registry.k8s.io/v2/kube-state-metrics/kube-state-metrics/blobs/sha256:9aee425378d2c16cd44177dc54a274b312897f5860a8e78fdfda555a0d79dd71\": context canceled" logger="UnhandledError" Dec 10 13:12:26 crc kubenswrapper[4921]: E1210 13:12:26.114141 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-state-metrics\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: reading blob sha256:9aee425378d2c16cd44177dc54a274b312897f5860a8e78fdfda555a0d79dd71: Get \\\"https://registry.k8s.io/v2/kube-state-metrics/kube-state-metrics/blobs/sha256:9aee425378d2c16cd44177dc54a274b312897f5860a8e78fdfda555a0d79dd71\\\": context canceled\"" pod="openstack/kube-state-metrics-0" podUID="4f843ca5-c8e0-4c44-a626-3cb41c83bab3" Dec 10 13:12:26 crc kubenswrapper[4921]: I1210 13:12:26.813175 4921 generic.go:334] "Generic (PLEG): container finished" podID="ecde3911-ea57-498b-a373-8389ccc0696c" containerID="94bfa87325d0e625218b5c421fc2daa5eebb59f35a43c984442a046410b39a06" exitCode=0 Dec 10 13:12:26 crc kubenswrapper[4921]: I1210 13:12:26.813428 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-67sv6" event={"ID":"ecde3911-ea57-498b-a373-8389ccc0696c","Type":"ContainerDied","Data":"94bfa87325d0e625218b5c421fc2daa5eebb59f35a43c984442a046410b39a06"} Dec 10 13:12:26 crc kubenswrapper[4921]: I1210 13:12:26.813696 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-67sv6" event={"ID":"ecde3911-ea57-498b-a373-8389ccc0696c","Type":"ContainerStarted","Data":"15658914fef3a7f503df0185db0628f8b1ea7fbb393a1a1b79e02f7b026d885f"} Dec 10 13:12:26 crc kubenswrapper[4921]: I1210 13:12:26.824599 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"c0fbcc23-a641-4787-9edc-7bf8e8e46a79","Type":"ContainerStarted","Data":"ab8133175ed4ae661fb3da608a1fc4a98b564bf9fb6f7a4556755e1ba8810206"} Dec 10 13:12:26 crc kubenswrapper[4921]: I1210 13:12:26.825461 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="" pod="openstack/memcached-0" Dec 10 13:12:26 crc kubenswrapper[4921]: I1210 13:12:26.831214 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"18df86ca-4da7-4979-8a7d-1db5b93c42f4","Type":"ContainerStarted","Data":"2be50ad5b188dbe6a656bfec36e5be0f5052d3a0d099b7d89c606a0cdd1834b8"} Dec 10 13:12:26 crc kubenswrapper[4921]: I1210 13:12:26.835496 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-zv79d" event={"ID":"8444cda9-1de8-49a1-81b3-ebf79bae9995","Type":"ContainerStarted","Data":"e735f19d58f79bb1d7ef2bb92a09fdd93e2a5678af9285515c480d32fe01247a"} Dec 10 13:12:26 crc kubenswrapper[4921]: I1210 13:12:26.842039 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" event={"ID":"354355f7-6630-49a8-bdc5-5e875feecb7f","Type":"ContainerStarted","Data":"4971420ec666b0633f92ea9dfa8a109dd0d7730fedda43f31a5af62a9d620d9b"} Dec 10 13:12:26 crc kubenswrapper[4921]: I1210 13:12:26.848793 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"65febf2e-fbb3-42a6-96e0-b7933c0911dd","Type":"ContainerStarted","Data":"10e0123e1146a2410eb88d36c0fab6cc39991823c4191fa90d321a904c271cf3"} Dec 10 13:12:26 crc kubenswrapper[4921]: I1210 13:12:26.856538 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-75zw8" event={"ID":"be165009-1ecf-4849-8cff-e83071094e81","Type":"ContainerStarted","Data":"1e0eafbb1c5a3ec3cec9c8f66e1436ac3da1ece3b18e5fd83d48461cb6070be4"} Dec 10 13:12:26 crc kubenswrapper[4921]: I1210 13:12:26.856775 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-75zw8" Dec 10 13:12:26 crc kubenswrapper[4921]: E1210 13:12:26.857898 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-state-metrics\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.k8s.io/kube-state-metrics/kube-state-metrics:v2.15.0\\\"\"" pod="openstack/kube-state-metrics-0" podUID="4f843ca5-c8e0-4c44-a626-3cb41c83bab3" Dec 10 13:12:26 crc kubenswrapper[4921]: I1210 13:12:26.892002 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=5.420838904 podStartE2EDuration="43.891979979s" podCreationTimestamp="2025-12-10 13:11:43 +0000 UTC" firstStartedPulling="2025-12-10 13:11:45.159926919 +0000 UTC m=+902.376148843" lastFinishedPulling="2025-12-10 13:12:23.631067994 +0000 UTC m=+940.847289918" observedRunningTime="2025-12-10 13:12:26.888483605 +0000 UTC m=+944.104705549" watchObservedRunningTime="2025-12-10 13:12:26.891979979 +0000 UTC m=+944.108201903" Dec 10 13:12:26 crc kubenswrapper[4921]: I1210 13:12:26.915914 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-75zw8" podStartSLOduration=16.968004751 podStartE2EDuration="38.915894831s" podCreationTimestamp="2025-12-10 13:11:48 +0000 UTC" firstStartedPulling="2025-12-10 13:12:04.114111106 +0000 UTC m=+921.330333030" lastFinishedPulling="2025-12-10 13:12:26.062001186 +0000 UTC m=+943.278223110" observedRunningTime="2025-12-10 13:12:26.908148813 +0000 UTC m=+944.124370747" watchObservedRunningTime="2025-12-10 13:12:26.915894831 +0000 UTC m=+944.132116765" Dec 10 13:12:27 crc kubenswrapper[4921]: I1210 13:12:27.205786 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="2fe91281-0f3f-4e50-9701-5d237060f645" path="/var/lib/kubelet/pods/2fe91281-0f3f-4e50-9701-5d237060f645/volumes" Dec 10 13:12:27 crc kubenswrapper[4921]: I1210 13:12:27.206285 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6d77f2dd-23bb-44c4-9f82-ef2587a0d872" path="/var/lib/kubelet/pods/6d77f2dd-23bb-44c4-9f82-ef2587a0d872/volumes" Dec 10 13:12:27 crc kubenswrapper[4921]: I1210 13:12:27.865228 4921 generic.go:334] "Generic (PLEG): container finished" podID="8444cda9-1de8-49a1-81b3-ebf79bae9995" containerID="e735f19d58f79bb1d7ef2bb92a09fdd93e2a5678af9285515c480d32fe01247a" exitCode=0 Dec 10 13:12:27 crc kubenswrapper[4921]: I1210 13:12:27.866523 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-zv79d" event={"ID":"8444cda9-1de8-49a1-81b3-ebf79bae9995","Type":"ContainerDied","Data":"e735f19d58f79bb1d7ef2bb92a09fdd93e2a5678af9285515c480d32fe01247a"} Dec 10 13:12:28 crc kubenswrapper[4921]: I1210 13:12:28.876329 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"f5f51319-9413-4553-b21a-6ac18a452855","Type":"ContainerStarted","Data":"d77c9985e30172f57286dba5e10a3e91333892078e22ff1a85fcd059c733f796"} Dec 10 13:12:28 crc kubenswrapper[4921]: I1210 13:12:28.881064 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"49be5ede-571d-40af-b73e-b2f6678df767","Type":"ContainerStarted","Data":"df666f4ad1b5dc5a9cb24b63420834c7443625731bf66dcf5fcafd77ce71958d"} Dec 10 13:12:28 crc kubenswrapper[4921]: I1210 13:12:28.883329 4921 generic.go:334] "Generic (PLEG): container finished" podID="ecde3911-ea57-498b-a373-8389ccc0696c" containerID="a3b91dd196be9d2b3f2fb44ab22145ad2b83c059fb06943eefda22237a7d9c2e" exitCode=0 Dec 10 13:12:28 crc kubenswrapper[4921]: I1210 13:12:28.883412 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-67sv6" event={"ID":"ecde3911-ea57-498b-a373-8389ccc0696c","Type":"ContainerDied","Data":"a3b91dd196be9d2b3f2fb44ab22145ad2b83c059fb06943eefda22237a7d9c2e"} Dec 10 13:12:28 crc kubenswrapper[4921]: I1210 13:12:28.885519 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-zv79d" event={"ID":"8444cda9-1de8-49a1-81b3-ebf79bae9995","Type":"ContainerStarted","Data":"f31f16a5e998f0d1b059b25ff9e5e5e89b57fef0ae8fe6f6d884b9342d9753fc"} Dec 10 13:12:29 crc kubenswrapper[4921]: I1210 13:12:29.909238 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-zv79d" event={"ID":"8444cda9-1de8-49a1-81b3-ebf79bae9995","Type":"ContainerStarted","Data":"3a0aa49d01e7c4b0e87e2a8b37ded794fa7b1a254267e136cc3836902eb03eab"} Dec 10 13:12:29 crc kubenswrapper[4921]: I1210 13:12:29.909609 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-zv79d" Dec 10 13:12:29 crc kubenswrapper[4921]: I1210 13:12:29.909624 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-zv79d" Dec 10 13:12:29 crc kubenswrapper[4921]: I1210 13:12:29.913679 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"e098cd5a-992f-42a0-a89e-d8dd59dbbcc5","Type":"ContainerStarted","Data":"be2e3565a694652e9fc8296be0cbb14d87a944c9148a1b7742d5d0c31754e2a9"} Dec 10 13:12:29 crc kubenswrapper[4921]: I1210 13:12:29.937256 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack/ovn-controller-ovs-zv79d" podStartSLOduration=19.98202196 podStartE2EDuration="41.937235327s" podCreationTimestamp="2025-12-10 13:11:48 +0000 UTC" firstStartedPulling="2025-12-10 13:12:03.317639443 +0000 UTC m=+920.533861387" lastFinishedPulling="2025-12-10 13:12:25.27285283 +0000 UTC m=+942.489074754" observedRunningTime="2025-12-10 13:12:29.924479074 +0000 UTC m=+947.140701008" watchObservedRunningTime="2025-12-10 13:12:29.937235327 +0000 UTC m=+947.153457251" Dec 10 13:12:34 crc kubenswrapper[4921]: I1210 13:12:34.211381 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0" Dec 10 13:12:41 crc kubenswrapper[4921]: E1210 13:12:41.378698 4921 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified" Dec 10 13:12:41 crc kubenswrapper[4921]: E1210 13:12:41.379541 4921 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:openstack-network-exporter,Image:quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified,Command:[/app/openstack-network-exporter],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:OPENSTACK_NETWORK_EXPORTER_YAML,Value:/etc/config/openstack-network-exporter.yaml,ValueFrom:nil,},EnvVar{Name:CONFIG_HASH,Value:n76h65ch7fh588hb4h9ch74h68fh566h5ffh677h5dh57ch546hc5h6fh548h99h56ch5d9h544h687h695h5fdh584h596h659h84h657h95h694hf9q,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:ovs-rundir,ReadOnly:true,MountPath:/var/run/openvswitch,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovn-rundir,ReadOnly:true,MountPath:/var/run/ovn,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:metrics-certs-tls-certs,ReadOnly:true,MountPath:/etc/pki/tls/certs/ovnmetrics.crt,SubPath:tls.crt,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:metrics-certs-tls-certs,ReadOnly:true,MountPath:/etc/pki/tls/private/ovnmetrics.key,SubPath:tls.key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:metrics-certs-tls-certs,ReadOnly:true,MountPath:/etc/pki/tls/certs/ovndbca.crt,SubPath:ca.crt,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-v6d4x,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[NET_ADMIN SYS_ADMIN 
SYS_NICE],Drop:[],},Privileged:*true,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovn-controller-metrics-f9dft_openstack(039043c2-b38e-4a56-a3ce-45c9462ed0f1): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 10 13:12:41 crc kubenswrapper[4921]: E1210 13:12:41.380818 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"openstack-network-exporter\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/ovn-controller-metrics-f9dft" podUID="039043c2-b38e-4a56-a3ce-45c9462ed0f1" Dec 10 13:12:41 crc kubenswrapper[4921]: I1210 13:12:41.543807 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"c651083f-4dd3-4963-892f-ddbc5ef1af05","Type":"ContainerStarted","Data":"ceab0d7e68ee169e4cbcf90d582662eb65c586d8bb19ae2df9006de3739acd44"} Dec 10 13:12:42 crc kubenswrapper[4921]: I1210 13:12:42.550984 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-f9dft" event={"ID":"039043c2-b38e-4a56-a3ce-45c9462ed0f1","Type":"ContainerStarted","Data":"e299d8eb5dd7989532f14ce4aeae05674e0f8ab1265ec2be92a642f908abcd76"} Dec 10 13:12:42 crc kubenswrapper[4921]: I1210 13:12:42.553864 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"f5f51319-9413-4553-b21a-6ac18a452855","Type":"ContainerStarted","Data":"05940131307a508ac870f764f05132a22ba489e3a63de679f1241374b990b483"} Dec 10 13:12:42 crc kubenswrapper[4921]: I1210 13:12:42.555784 4921 generic.go:334] "Generic (PLEG): container finished" podID="65febf2e-fbb3-42a6-96e0-b7933c0911dd" containerID="10e0123e1146a2410eb88d36c0fab6cc39991823c4191fa90d321a904c271cf3" exitCode=0 Dec 10 13:12:42 crc kubenswrapper[4921]: I1210 13:12:42.555848 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"65febf2e-fbb3-42a6-96e0-b7933c0911dd","Type":"ContainerDied","Data":"10e0123e1146a2410eb88d36c0fab6cc39991823c4191fa90d321a904c271cf3"} Dec 10 13:12:42 crc kubenswrapper[4921]: I1210 13:12:42.559480 4921 generic.go:334] "Generic (PLEG): container finished" podID="cff820e5-df50-4ec5-bd20-4320a33badf1" containerID="dea0d273c20d455d93c0c3fc20dffcd77ba6eaa6aadc1360d9205a41c8832034" exitCode=0 Dec 10 13:12:42 crc kubenswrapper[4921]: I1210 13:12:42.559527 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-42vg9" event={"ID":"cff820e5-df50-4ec5-bd20-4320a33badf1","Type":"ContainerDied","Data":"dea0d273c20d455d93c0c3fc20dffcd77ba6eaa6aadc1360d9205a41c8832034"} Dec 10 13:12:42 crc kubenswrapper[4921]: I1210 13:12:42.561482 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"49be5ede-571d-40af-b73e-b2f6678df767","Type":"ContainerStarted","Data":"2f5ccc3030123958a1d2d06594608ee81134f51004dc4981f184ac9a3507c810"} Dec 10 13:12:42 crc kubenswrapper[4921]: I1210 13:12:42.564264 4921 generic.go:334] "Generic (PLEG): container finished" podID="29ac1ff7-0ecb-4414-be96-ee937ba5bbb7" 
containerID="0dbf00640d21da79fd150c37e75d33ed5e7b63a57611c739ba4bdeb310d85df3" exitCode=0 Dec 10 13:12:42 crc kubenswrapper[4921]: I1210 13:12:42.564347 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5ccc8479f9-pfmfg" event={"ID":"29ac1ff7-0ecb-4414-be96-ee937ba5bbb7","Type":"ContainerDied","Data":"0dbf00640d21da79fd150c37e75d33ed5e7b63a57611c739ba4bdeb310d85df3"} Dec 10 13:12:42 crc kubenswrapper[4921]: I1210 13:12:42.580504 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-67sv6" event={"ID":"ecde3911-ea57-498b-a373-8389ccc0696c","Type":"ContainerStarted","Data":"f21a53403c5863dc102f525d67cae424fc38b935a577cff27421e0e024f0ca7b"} Dec 10 13:12:42 crc kubenswrapper[4921]: I1210 13:12:42.582327 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-f9dft" podStartSLOduration=-9223371984.272465 podStartE2EDuration="52.582310892s" podCreationTimestamp="2025-12-10 13:11:50 +0000 UTC" firstStartedPulling="2025-12-10 13:12:25.046306021 +0000 UTC m=+942.262527945" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 13:12:42.580233186 +0000 UTC m=+959.796455120" watchObservedRunningTime="2025-12-10 13:12:42.582310892 +0000 UTC m=+959.798532806" Dec 10 13:12:42 crc kubenswrapper[4921]: I1210 13:12:42.611855 4921 generic.go:334] "Generic (PLEG): container finished" podID="18df86ca-4da7-4979-8a7d-1db5b93c42f4" containerID="2be50ad5b188dbe6a656bfec36e5be0f5052d3a0d099b7d89c606a0cdd1834b8" exitCode=0 Dec 10 13:12:42 crc kubenswrapper[4921]: I1210 13:12:42.612187 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"18df86ca-4da7-4979-8a7d-1db5b93c42f4","Type":"ContainerDied","Data":"2be50ad5b188dbe6a656bfec36e5be0f5052d3a0d099b7d89c606a0cdd1834b8"} Dec 10 13:12:42 crc kubenswrapper[4921]: I1210 13:12:42.745630 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=36.734258516 podStartE2EDuration="52.745609244s" podCreationTimestamp="2025-12-10 13:11:50 +0000 UTC" firstStartedPulling="2025-12-10 13:12:25.466509967 +0000 UTC m=+942.682731891" lastFinishedPulling="2025-12-10 13:12:41.477860695 +0000 UTC m=+958.694082619" observedRunningTime="2025-12-10 13:12:42.671772552 +0000 UTC m=+959.887994466" watchObservedRunningTime="2025-12-10 13:12:42.745609244 +0000 UTC m=+959.961831168" Dec 10 13:12:42 crc kubenswrapper[4921]: I1210 13:12:42.755632 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=34.122402586 podStartE2EDuration="50.755615272s" podCreationTimestamp="2025-12-10 13:11:52 +0000 UTC" firstStartedPulling="2025-12-10 13:12:24.97100326 +0000 UTC m=+942.187225184" lastFinishedPulling="2025-12-10 13:12:41.604215946 +0000 UTC m=+958.820437870" observedRunningTime="2025-12-10 13:12:42.7100771 +0000 UTC m=+959.926299024" watchObservedRunningTime="2025-12-10 13:12:42.755615272 +0000 UTC m=+959.971837196" Dec 10 13:12:42 crc kubenswrapper[4921]: I1210 13:12:42.770912 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-67sv6" podStartSLOduration=11.666505236 podStartE2EDuration="25.770890692s" podCreationTimestamp="2025-12-10 13:12:17 +0000 UTC" firstStartedPulling="2025-12-10 13:12:27.572469709 +0000 UTC m=+944.788691643" lastFinishedPulling="2025-12-10 13:12:41.676855175 
+0000 UTC m=+958.893077099" observedRunningTime="2025-12-10 13:12:42.741120263 +0000 UTC m=+959.957342187" watchObservedRunningTime="2025-12-10 13:12:42.770890692 +0000 UTC m=+959.987112616" Dec 10 13:12:42 crc kubenswrapper[4921]: I1210 13:12:42.871220 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5ccc8479f9-pfmfg"] Dec 10 13:12:42 crc kubenswrapper[4921]: I1210 13:12:42.918593 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5bf47b49b7-b8bwc"] Dec 10 13:12:42 crc kubenswrapper[4921]: I1210 13:12:42.920828 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5bf47b49b7-b8bwc" Dec 10 13:12:42 crc kubenswrapper[4921]: I1210 13:12:42.923434 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb" Dec 10 13:12:42 crc kubenswrapper[4921]: I1210 13:12:42.942961 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5bf47b49b7-b8bwc"] Dec 10 13:12:43 crc kubenswrapper[4921]: I1210 13:12:43.060192 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b72dcc76-4198-4feb-a942-6a9073765a8e-dns-svc\") pod \"dnsmasq-dns-5bf47b49b7-b8bwc\" (UID: \"b72dcc76-4198-4feb-a942-6a9073765a8e\") " pod="openstack/dnsmasq-dns-5bf47b49b7-b8bwc" Dec 10 13:12:43 crc kubenswrapper[4921]: I1210 13:12:43.060659 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b72dcc76-4198-4feb-a942-6a9073765a8e-ovsdbserver-nb\") pod \"dnsmasq-dns-5bf47b49b7-b8bwc\" (UID: \"b72dcc76-4198-4feb-a942-6a9073765a8e\") " pod="openstack/dnsmasq-dns-5bf47b49b7-b8bwc" Dec 10 13:12:43 crc kubenswrapper[4921]: I1210 13:12:43.060774 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b72dcc76-4198-4feb-a942-6a9073765a8e-config\") pod \"dnsmasq-dns-5bf47b49b7-b8bwc\" (UID: \"b72dcc76-4198-4feb-a942-6a9073765a8e\") " pod="openstack/dnsmasq-dns-5bf47b49b7-b8bwc" Dec 10 13:12:43 crc kubenswrapper[4921]: I1210 13:12:43.060872 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wxcxh\" (UniqueName: \"kubernetes.io/projected/b72dcc76-4198-4feb-a942-6a9073765a8e-kube-api-access-wxcxh\") pod \"dnsmasq-dns-5bf47b49b7-b8bwc\" (UID: \"b72dcc76-4198-4feb-a942-6a9073765a8e\") " pod="openstack/dnsmasq-dns-5bf47b49b7-b8bwc" Dec 10 13:12:43 crc kubenswrapper[4921]: I1210 13:12:43.121048 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-42vg9"] Dec 10 13:12:43 crc kubenswrapper[4921]: I1210 13:12:43.153261 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0" Dec 10 13:12:43 crc kubenswrapper[4921]: I1210 13:12:43.162056 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b72dcc76-4198-4feb-a942-6a9073765a8e-dns-svc\") pod \"dnsmasq-dns-5bf47b49b7-b8bwc\" (UID: \"b72dcc76-4198-4feb-a942-6a9073765a8e\") " pod="openstack/dnsmasq-dns-5bf47b49b7-b8bwc" Dec 10 13:12:43 crc kubenswrapper[4921]: I1210 13:12:43.162355 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: 
\"kubernetes.io/configmap/b72dcc76-4198-4feb-a942-6a9073765a8e-ovsdbserver-nb\") pod \"dnsmasq-dns-5bf47b49b7-b8bwc\" (UID: \"b72dcc76-4198-4feb-a942-6a9073765a8e\") " pod="openstack/dnsmasq-dns-5bf47b49b7-b8bwc" Dec 10 13:12:43 crc kubenswrapper[4921]: I1210 13:12:43.162482 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b72dcc76-4198-4feb-a942-6a9073765a8e-config\") pod \"dnsmasq-dns-5bf47b49b7-b8bwc\" (UID: \"b72dcc76-4198-4feb-a942-6a9073765a8e\") " pod="openstack/dnsmasq-dns-5bf47b49b7-b8bwc" Dec 10 13:12:43 crc kubenswrapper[4921]: I1210 13:12:43.162684 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wxcxh\" (UniqueName: \"kubernetes.io/projected/b72dcc76-4198-4feb-a942-6a9073765a8e-kube-api-access-wxcxh\") pod \"dnsmasq-dns-5bf47b49b7-b8bwc\" (UID: \"b72dcc76-4198-4feb-a942-6a9073765a8e\") " pod="openstack/dnsmasq-dns-5bf47b49b7-b8bwc" Dec 10 13:12:43 crc kubenswrapper[4921]: I1210 13:12:43.163190 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b72dcc76-4198-4feb-a942-6a9073765a8e-dns-svc\") pod \"dnsmasq-dns-5bf47b49b7-b8bwc\" (UID: \"b72dcc76-4198-4feb-a942-6a9073765a8e\") " pod="openstack/dnsmasq-dns-5bf47b49b7-b8bwc" Dec 10 13:12:43 crc kubenswrapper[4921]: I1210 13:12:43.163789 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b72dcc76-4198-4feb-a942-6a9073765a8e-config\") pod \"dnsmasq-dns-5bf47b49b7-b8bwc\" (UID: \"b72dcc76-4198-4feb-a942-6a9073765a8e\") " pod="openstack/dnsmasq-dns-5bf47b49b7-b8bwc" Dec 10 13:12:43 crc kubenswrapper[4921]: I1210 13:12:43.166307 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb" Dec 10 13:12:43 crc kubenswrapper[4921]: I1210 13:12:43.177142 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b72dcc76-4198-4feb-a942-6a9073765a8e-ovsdbserver-nb\") pod \"dnsmasq-dns-5bf47b49b7-b8bwc\" (UID: \"b72dcc76-4198-4feb-a942-6a9073765a8e\") " pod="openstack/dnsmasq-dns-5bf47b49b7-b8bwc" Dec 10 13:12:43 crc kubenswrapper[4921]: I1210 13:12:43.186793 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-8554648995-ns77l"] Dec 10 13:12:43 crc kubenswrapper[4921]: I1210 13:12:43.188484 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8554648995-ns77l" Dec 10 13:12:43 crc kubenswrapper[4921]: I1210 13:12:43.195602 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wxcxh\" (UniqueName: \"kubernetes.io/projected/b72dcc76-4198-4feb-a942-6a9073765a8e-kube-api-access-wxcxh\") pod \"dnsmasq-dns-5bf47b49b7-b8bwc\" (UID: \"b72dcc76-4198-4feb-a942-6a9073765a8e\") " pod="openstack/dnsmasq-dns-5bf47b49b7-b8bwc" Dec 10 13:12:43 crc kubenswrapper[4921]: I1210 13:12:43.205240 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb" Dec 10 13:12:43 crc kubenswrapper[4921]: I1210 13:12:43.232476 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8554648995-ns77l"] Dec 10 13:12:43 crc kubenswrapper[4921]: I1210 13:12:43.252594 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5bf47b49b7-b8bwc" Dec 10 13:12:43 crc kubenswrapper[4921]: I1210 13:12:43.264863 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f88eabcd-9eab-4760-b3b7-3d07479f164c-config\") pod \"dnsmasq-dns-8554648995-ns77l\" (UID: \"f88eabcd-9eab-4760-b3b7-3d07479f164c\") " pod="openstack/dnsmasq-dns-8554648995-ns77l" Dec 10 13:12:43 crc kubenswrapper[4921]: I1210 13:12:43.264944 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f88eabcd-9eab-4760-b3b7-3d07479f164c-ovsdbserver-sb\") pod \"dnsmasq-dns-8554648995-ns77l\" (UID: \"f88eabcd-9eab-4760-b3b7-3d07479f164c\") " pod="openstack/dnsmasq-dns-8554648995-ns77l" Dec 10 13:12:43 crc kubenswrapper[4921]: I1210 13:12:43.264965 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pshck\" (UniqueName: \"kubernetes.io/projected/f88eabcd-9eab-4760-b3b7-3d07479f164c-kube-api-access-pshck\") pod \"dnsmasq-dns-8554648995-ns77l\" (UID: \"f88eabcd-9eab-4760-b3b7-3d07479f164c\") " pod="openstack/dnsmasq-dns-8554648995-ns77l" Dec 10 13:12:43 crc kubenswrapper[4921]: I1210 13:12:43.265002 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f88eabcd-9eab-4760-b3b7-3d07479f164c-ovsdbserver-nb\") pod \"dnsmasq-dns-8554648995-ns77l\" (UID: \"f88eabcd-9eab-4760-b3b7-3d07479f164c\") " pod="openstack/dnsmasq-dns-8554648995-ns77l" Dec 10 13:12:43 crc kubenswrapper[4921]: I1210 13:12:43.265057 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f88eabcd-9eab-4760-b3b7-3d07479f164c-dns-svc\") pod \"dnsmasq-dns-8554648995-ns77l\" (UID: \"f88eabcd-9eab-4760-b3b7-3d07479f164c\") " pod="openstack/dnsmasq-dns-8554648995-ns77l" Dec 10 13:12:43 crc kubenswrapper[4921]: I1210 13:12:43.275198 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0" Dec 10 13:12:43 crc kubenswrapper[4921]: I1210 13:12:43.367650 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f88eabcd-9eab-4760-b3b7-3d07479f164c-dns-svc\") pod \"dnsmasq-dns-8554648995-ns77l\" (UID: \"f88eabcd-9eab-4760-b3b7-3d07479f164c\") " pod="openstack/dnsmasq-dns-8554648995-ns77l" Dec 10 13:12:43 crc kubenswrapper[4921]: I1210 13:12:43.367708 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f88eabcd-9eab-4760-b3b7-3d07479f164c-config\") pod \"dnsmasq-dns-8554648995-ns77l\" (UID: \"f88eabcd-9eab-4760-b3b7-3d07479f164c\") " pod="openstack/dnsmasq-dns-8554648995-ns77l" Dec 10 13:12:43 crc kubenswrapper[4921]: I1210 13:12:43.367781 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f88eabcd-9eab-4760-b3b7-3d07479f164c-ovsdbserver-sb\") pod \"dnsmasq-dns-8554648995-ns77l\" (UID: \"f88eabcd-9eab-4760-b3b7-3d07479f164c\") " pod="openstack/dnsmasq-dns-8554648995-ns77l" Dec 10 13:12:43 crc kubenswrapper[4921]: I1210 13:12:43.367802 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-pshck\" (UniqueName: \"kubernetes.io/projected/f88eabcd-9eab-4760-b3b7-3d07479f164c-kube-api-access-pshck\") pod \"dnsmasq-dns-8554648995-ns77l\" (UID: \"f88eabcd-9eab-4760-b3b7-3d07479f164c\") " pod="openstack/dnsmasq-dns-8554648995-ns77l" Dec 10 13:12:43 crc kubenswrapper[4921]: I1210 13:12:43.367850 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f88eabcd-9eab-4760-b3b7-3d07479f164c-ovsdbserver-nb\") pod \"dnsmasq-dns-8554648995-ns77l\" (UID: \"f88eabcd-9eab-4760-b3b7-3d07479f164c\") " pod="openstack/dnsmasq-dns-8554648995-ns77l" Dec 10 13:12:43 crc kubenswrapper[4921]: I1210 13:12:43.369152 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f88eabcd-9eab-4760-b3b7-3d07479f164c-dns-svc\") pod \"dnsmasq-dns-8554648995-ns77l\" (UID: \"f88eabcd-9eab-4760-b3b7-3d07479f164c\") " pod="openstack/dnsmasq-dns-8554648995-ns77l" Dec 10 13:12:43 crc kubenswrapper[4921]: I1210 13:12:43.369554 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f88eabcd-9eab-4760-b3b7-3d07479f164c-config\") pod \"dnsmasq-dns-8554648995-ns77l\" (UID: \"f88eabcd-9eab-4760-b3b7-3d07479f164c\") " pod="openstack/dnsmasq-dns-8554648995-ns77l" Dec 10 13:12:43 crc kubenswrapper[4921]: I1210 13:12:43.370023 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f88eabcd-9eab-4760-b3b7-3d07479f164c-ovsdbserver-sb\") pod \"dnsmasq-dns-8554648995-ns77l\" (UID: \"f88eabcd-9eab-4760-b3b7-3d07479f164c\") " pod="openstack/dnsmasq-dns-8554648995-ns77l" Dec 10 13:12:43 crc kubenswrapper[4921]: I1210 13:12:43.373088 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f88eabcd-9eab-4760-b3b7-3d07479f164c-ovsdbserver-nb\") pod \"dnsmasq-dns-8554648995-ns77l\" (UID: \"f88eabcd-9eab-4760-b3b7-3d07479f164c\") " pod="openstack/dnsmasq-dns-8554648995-ns77l" Dec 10 13:12:43 crc kubenswrapper[4921]: I1210 13:12:43.393556 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pshck\" (UniqueName: \"kubernetes.io/projected/f88eabcd-9eab-4760-b3b7-3d07479f164c-kube-api-access-pshck\") pod \"dnsmasq-dns-8554648995-ns77l\" (UID: \"f88eabcd-9eab-4760-b3b7-3d07479f164c\") " pod="openstack/dnsmasq-dns-8554648995-ns77l" Dec 10 13:12:43 crc kubenswrapper[4921]: I1210 13:12:43.544752 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-8554648995-ns77l" Dec 10 13:12:43 crc kubenswrapper[4921]: I1210 13:12:43.613733 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0" Dec 10 13:12:43 crc kubenswrapper[4921]: I1210 13:12:43.622925 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"65febf2e-fbb3-42a6-96e0-b7933c0911dd","Type":"ContainerStarted","Data":"7969d75fa60a516460965bc0b9e5a0bae69a213124186cf6ad2e3b6a1a157837"} Dec 10 13:12:43 crc kubenswrapper[4921]: I1210 13:12:43.626301 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-42vg9" event={"ID":"cff820e5-df50-4ec5-bd20-4320a33badf1","Type":"ContainerStarted","Data":"9e47fc8744b969a2ea4494894ad9526eadb8e12b2c47da7a3f7916d37316eab5"} Dec 10 13:12:43 crc kubenswrapper[4921]: I1210 13:12:43.626433 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-57d769cc4f-42vg9" Dec 10 13:12:43 crc kubenswrapper[4921]: I1210 13:12:43.626457 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-57d769cc4f-42vg9" podUID="cff820e5-df50-4ec5-bd20-4320a33badf1" containerName="dnsmasq-dns" containerID="cri-o://9e47fc8744b969a2ea4494894ad9526eadb8e12b2c47da7a3f7916d37316eab5" gracePeriod=10 Dec 10 13:12:43 crc kubenswrapper[4921]: I1210 13:12:43.630003 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5ccc8479f9-pfmfg" event={"ID":"29ac1ff7-0ecb-4414-be96-ee937ba5bbb7","Type":"ContainerStarted","Data":"5fef25da163d77225028acb9dd5ff2a00765de1675d522b96b47a0c6a417ed18"} Dec 10 13:12:43 crc kubenswrapper[4921]: I1210 13:12:43.630125 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5ccc8479f9-pfmfg" podUID="29ac1ff7-0ecb-4414-be96-ee937ba5bbb7" containerName="dnsmasq-dns" containerID="cri-o://5fef25da163d77225028acb9dd5ff2a00765de1675d522b96b47a0c6a417ed18" gracePeriod=10 Dec 10 13:12:43 crc kubenswrapper[4921]: I1210 13:12:43.630196 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5ccc8479f9-pfmfg" Dec 10 13:12:43 crc kubenswrapper[4921]: I1210 13:12:43.636573 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"18df86ca-4da7-4979-8a7d-1db5b93c42f4","Type":"ContainerStarted","Data":"16277b13498ce9ae783508033536fc5669759a9d2af976a6fca9858ea0c0fc9c"} Dec 10 13:12:43 crc kubenswrapper[4921]: I1210 13:12:43.636912 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0" Dec 10 13:12:43 crc kubenswrapper[4921]: I1210 13:12:43.653960 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=25.059408518 podStartE2EDuration="1m2.653942649s" podCreationTimestamp="2025-12-10 13:11:41 +0000 UTC" firstStartedPulling="2025-12-10 13:11:43.437477788 +0000 UTC m=+900.653699712" lastFinishedPulling="2025-12-10 13:12:21.032011919 +0000 UTC m=+938.248233843" observedRunningTime="2025-12-10 13:12:43.643859158 +0000 UTC m=+960.860081092" watchObservedRunningTime="2025-12-10 13:12:43.653942649 +0000 UTC m=+960.870164573" Dec 10 13:12:43 crc kubenswrapper[4921]: I1210 13:12:43.683363 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-57d769cc4f-42vg9" podStartSLOduration=4.147607029 
podStartE2EDuration="1m4.683343208s" podCreationTimestamp="2025-12-10 13:11:39 +0000 UTC" firstStartedPulling="2025-12-10 13:11:40.983751353 +0000 UTC m=+898.199973277" lastFinishedPulling="2025-12-10 13:12:41.519487532 +0000 UTC m=+958.735709456" observedRunningTime="2025-12-10 13:12:43.678334893 +0000 UTC m=+960.894556817" watchObservedRunningTime="2025-12-10 13:12:43.683343208 +0000 UTC m=+960.899565132" Dec 10 13:12:43 crc kubenswrapper[4921]: I1210 13:12:43.704434 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=21.543441063 podStartE2EDuration="1m1.704414263s" podCreationTimestamp="2025-12-10 13:11:42 +0000 UTC" firstStartedPulling="2025-12-10 13:11:45.1100397 +0000 UTC m=+902.326261624" lastFinishedPulling="2025-12-10 13:12:25.2710129 +0000 UTC m=+942.487234824" observedRunningTime="2025-12-10 13:12:43.703531169 +0000 UTC m=+960.919753103" watchObservedRunningTime="2025-12-10 13:12:43.704414263 +0000 UTC m=+960.920636197" Dec 10 13:12:43 crc kubenswrapper[4921]: I1210 13:12:43.731260 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5ccc8479f9-pfmfg" podStartSLOduration=4.365553764 podStartE2EDuration="1m5.731241193s" podCreationTimestamp="2025-12-10 13:11:38 +0000 UTC" firstStartedPulling="2025-12-10 13:11:40.113109811 +0000 UTC m=+897.329331735" lastFinishedPulling="2025-12-10 13:12:41.47879724 +0000 UTC m=+958.695019164" observedRunningTime="2025-12-10 13:12:43.726882356 +0000 UTC m=+960.943104280" watchObservedRunningTime="2025-12-10 13:12:43.731241193 +0000 UTC m=+960.947463117" Dec 10 13:12:43 crc kubenswrapper[4921]: I1210 13:12:43.732365 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0" Dec 10 13:12:43 crc kubenswrapper[4921]: I1210 13:12:43.781448 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5bf47b49b7-b8bwc"] Dec 10 13:12:43 crc kubenswrapper[4921]: W1210 13:12:43.801185 4921 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb72dcc76_4198_4feb_a942_6a9073765a8e.slice/crio-87faa32da607eb32b450603777153278f4a3622c24687a898bc14f4b23727915 WatchSource:0}: Error finding container 87faa32da607eb32b450603777153278f4a3622c24687a898bc14f4b23727915: Status 404 returned error can't find the container with id 87faa32da607eb32b450603777153278f4a3622c24687a898bc14f4b23727915 Dec 10 13:12:44 crc kubenswrapper[4921]: I1210 13:12:44.030015 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0" Dec 10 13:12:44 crc kubenswrapper[4921]: I1210 13:12:44.030073 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0" Dec 10 13:12:44 crc kubenswrapper[4921]: I1210 13:12:44.065616 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8554648995-ns77l"] Dec 10 13:12:44 crc kubenswrapper[4921]: I1210 13:12:44.121222 4921 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-42vg9" Dec 10 13:12:44 crc kubenswrapper[4921]: I1210 13:12:44.179423 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cff820e5-df50-4ec5-bd20-4320a33badf1-config\") pod \"cff820e5-df50-4ec5-bd20-4320a33badf1\" (UID: \"cff820e5-df50-4ec5-bd20-4320a33badf1\") " Dec 10 13:12:44 crc kubenswrapper[4921]: I1210 13:12:44.179490 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cff820e5-df50-4ec5-bd20-4320a33badf1-dns-svc\") pod \"cff820e5-df50-4ec5-bd20-4320a33badf1\" (UID: \"cff820e5-df50-4ec5-bd20-4320a33badf1\") " Dec 10 13:12:44 crc kubenswrapper[4921]: I1210 13:12:44.179520 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pd2pj\" (UniqueName: \"kubernetes.io/projected/cff820e5-df50-4ec5-bd20-4320a33badf1-kube-api-access-pd2pj\") pod \"cff820e5-df50-4ec5-bd20-4320a33badf1\" (UID: \"cff820e5-df50-4ec5-bd20-4320a33badf1\") " Dec 10 13:12:44 crc kubenswrapper[4921]: I1210 13:12:44.186668 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cff820e5-df50-4ec5-bd20-4320a33badf1-kube-api-access-pd2pj" (OuterVolumeSpecName: "kube-api-access-pd2pj") pod "cff820e5-df50-4ec5-bd20-4320a33badf1" (UID: "cff820e5-df50-4ec5-bd20-4320a33badf1"). InnerVolumeSpecName "kube-api-access-pd2pj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 13:12:44 crc kubenswrapper[4921]: I1210 13:12:44.194189 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5ccc8479f9-pfmfg" Dec 10 13:12:44 crc kubenswrapper[4921]: I1210 13:12:44.264553 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cff820e5-df50-4ec5-bd20-4320a33badf1-config" (OuterVolumeSpecName: "config") pod "cff820e5-df50-4ec5-bd20-4320a33badf1" (UID: "cff820e5-df50-4ec5-bd20-4320a33badf1"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 13:12:44 crc kubenswrapper[4921]: I1210 13:12:44.278927 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cff820e5-df50-4ec5-bd20-4320a33badf1-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "cff820e5-df50-4ec5-bd20-4320a33badf1" (UID: "cff820e5-df50-4ec5-bd20-4320a33badf1"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 13:12:44 crc kubenswrapper[4921]: I1210 13:12:44.281860 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/29ac1ff7-0ecb-4414-be96-ee937ba5bbb7-config\") pod \"29ac1ff7-0ecb-4414-be96-ee937ba5bbb7\" (UID: \"29ac1ff7-0ecb-4414-be96-ee937ba5bbb7\") " Dec 10 13:12:44 crc kubenswrapper[4921]: I1210 13:12:44.281947 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nkvrx\" (UniqueName: \"kubernetes.io/projected/29ac1ff7-0ecb-4414-be96-ee937ba5bbb7-kube-api-access-nkvrx\") pod \"29ac1ff7-0ecb-4414-be96-ee937ba5bbb7\" (UID: \"29ac1ff7-0ecb-4414-be96-ee937ba5bbb7\") " Dec 10 13:12:44 crc kubenswrapper[4921]: I1210 13:12:44.282022 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/29ac1ff7-0ecb-4414-be96-ee937ba5bbb7-dns-svc\") pod \"29ac1ff7-0ecb-4414-be96-ee937ba5bbb7\" (UID: \"29ac1ff7-0ecb-4414-be96-ee937ba5bbb7\") " Dec 10 13:12:44 crc kubenswrapper[4921]: I1210 13:12:44.282368 4921 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cff820e5-df50-4ec5-bd20-4320a33badf1-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 10 13:12:44 crc kubenswrapper[4921]: I1210 13:12:44.282386 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pd2pj\" (UniqueName: \"kubernetes.io/projected/cff820e5-df50-4ec5-bd20-4320a33badf1-kube-api-access-pd2pj\") on node \"crc\" DevicePath \"\"" Dec 10 13:12:44 crc kubenswrapper[4921]: I1210 13:12:44.282407 4921 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cff820e5-df50-4ec5-bd20-4320a33badf1-config\") on node \"crc\" DevicePath \"\"" Dec 10 13:12:44 crc kubenswrapper[4921]: I1210 13:12:44.297462 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/29ac1ff7-0ecb-4414-be96-ee937ba5bbb7-kube-api-access-nkvrx" (OuterVolumeSpecName: "kube-api-access-nkvrx") pod "29ac1ff7-0ecb-4414-be96-ee937ba5bbb7" (UID: "29ac1ff7-0ecb-4414-be96-ee937ba5bbb7"). InnerVolumeSpecName "kube-api-access-nkvrx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 13:12:44 crc kubenswrapper[4921]: I1210 13:12:44.319640 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/29ac1ff7-0ecb-4414-be96-ee937ba5bbb7-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "29ac1ff7-0ecb-4414-be96-ee937ba5bbb7" (UID: "29ac1ff7-0ecb-4414-be96-ee937ba5bbb7"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 13:12:44 crc kubenswrapper[4921]: I1210 13:12:44.324805 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/29ac1ff7-0ecb-4414-be96-ee937ba5bbb7-config" (OuterVolumeSpecName: "config") pod "29ac1ff7-0ecb-4414-be96-ee937ba5bbb7" (UID: "29ac1ff7-0ecb-4414-be96-ee937ba5bbb7"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 13:12:44 crc kubenswrapper[4921]: I1210 13:12:44.384522 4921 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/29ac1ff7-0ecb-4414-be96-ee937ba5bbb7-config\") on node \"crc\" DevicePath \"\"" Dec 10 13:12:44 crc kubenswrapper[4921]: I1210 13:12:44.384609 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nkvrx\" (UniqueName: \"kubernetes.io/projected/29ac1ff7-0ecb-4414-be96-ee937ba5bbb7-kube-api-access-nkvrx\") on node \"crc\" DevicePath \"\"" Dec 10 13:12:44 crc kubenswrapper[4921]: I1210 13:12:44.384626 4921 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/29ac1ff7-0ecb-4414-be96-ee937ba5bbb7-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 10 13:12:44 crc kubenswrapper[4921]: I1210 13:12:44.614377 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0" Dec 10 13:12:44 crc kubenswrapper[4921]: I1210 13:12:44.646215 4921 generic.go:334] "Generic (PLEG): container finished" podID="b72dcc76-4198-4feb-a942-6a9073765a8e" containerID="e305687028a7bd8c7d2e48e7146f9aa3f2c8da14a02515fccca76c75cc6b6b82" exitCode=0 Dec 10 13:12:44 crc kubenswrapper[4921]: I1210 13:12:44.646278 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bf47b49b7-b8bwc" event={"ID":"b72dcc76-4198-4feb-a942-6a9073765a8e","Type":"ContainerDied","Data":"e305687028a7bd8c7d2e48e7146f9aa3f2c8da14a02515fccca76c75cc6b6b82"} Dec 10 13:12:44 crc kubenswrapper[4921]: I1210 13:12:44.646307 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bf47b49b7-b8bwc" event={"ID":"b72dcc76-4198-4feb-a942-6a9073765a8e","Type":"ContainerStarted","Data":"87faa32da607eb32b450603777153278f4a3622c24687a898bc14f4b23727915"} Dec 10 13:12:44 crc kubenswrapper[4921]: I1210 13:12:44.648981 4921 generic.go:334] "Generic (PLEG): container finished" podID="f88eabcd-9eab-4760-b3b7-3d07479f164c" containerID="1c26313d1317751dda9afc9448652f5180b6d63a8b3fc52d42240ca787879766" exitCode=0 Dec 10 13:12:44 crc kubenswrapper[4921]: I1210 13:12:44.649023 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-ns77l" event={"ID":"f88eabcd-9eab-4760-b3b7-3d07479f164c","Type":"ContainerDied","Data":"1c26313d1317751dda9afc9448652f5180b6d63a8b3fc52d42240ca787879766"} Dec 10 13:12:44 crc kubenswrapper[4921]: I1210 13:12:44.649039 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-ns77l" event={"ID":"f88eabcd-9eab-4760-b3b7-3d07479f164c","Type":"ContainerStarted","Data":"43eef6fb50fd44d7f742addd7093f6b257e97d3fa7ff0098afe7c1a158a39f97"} Dec 10 13:12:44 crc kubenswrapper[4921]: I1210 13:12:44.656576 4921 generic.go:334] "Generic (PLEG): container finished" podID="cff820e5-df50-4ec5-bd20-4320a33badf1" containerID="9e47fc8744b969a2ea4494894ad9526eadb8e12b2c47da7a3f7916d37316eab5" exitCode=0 Dec 10 13:12:44 crc kubenswrapper[4921]: I1210 13:12:44.656682 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-42vg9" event={"ID":"cff820e5-df50-4ec5-bd20-4320a33badf1","Type":"ContainerDied","Data":"9e47fc8744b969a2ea4494894ad9526eadb8e12b2c47da7a3f7916d37316eab5"} Dec 10 13:12:44 crc kubenswrapper[4921]: I1210 13:12:44.656694 4921 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-42vg9" Dec 10 13:12:44 crc kubenswrapper[4921]: I1210 13:12:44.656718 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-42vg9" event={"ID":"cff820e5-df50-4ec5-bd20-4320a33badf1","Type":"ContainerDied","Data":"4ecbc90e9fc3ff6d3ebc0179852cfa1407130626c52f2019bfaf93def16cdf5f"} Dec 10 13:12:44 crc kubenswrapper[4921]: I1210 13:12:44.656740 4921 scope.go:117] "RemoveContainer" containerID="9e47fc8744b969a2ea4494894ad9526eadb8e12b2c47da7a3f7916d37316eab5" Dec 10 13:12:44 crc kubenswrapper[4921]: I1210 13:12:44.662281 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0" Dec 10 13:12:44 crc kubenswrapper[4921]: I1210 13:12:44.664735 4921 generic.go:334] "Generic (PLEG): container finished" podID="29ac1ff7-0ecb-4414-be96-ee937ba5bbb7" containerID="5fef25da163d77225028acb9dd5ff2a00765de1675d522b96b47a0c6a417ed18" exitCode=0 Dec 10 13:12:44 crc kubenswrapper[4921]: I1210 13:12:44.668350 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5ccc8479f9-pfmfg" Dec 10 13:12:44 crc kubenswrapper[4921]: I1210 13:12:44.674096 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5ccc8479f9-pfmfg" event={"ID":"29ac1ff7-0ecb-4414-be96-ee937ba5bbb7","Type":"ContainerDied","Data":"5fef25da163d77225028acb9dd5ff2a00765de1675d522b96b47a0c6a417ed18"} Dec 10 13:12:44 crc kubenswrapper[4921]: I1210 13:12:44.675648 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5ccc8479f9-pfmfg" event={"ID":"29ac1ff7-0ecb-4414-be96-ee937ba5bbb7","Type":"ContainerDied","Data":"40e85c94a1efff0842cfcf74e37cf86498edf599a7b36a6eef3c885d99465dac"} Dec 10 13:12:44 crc kubenswrapper[4921]: I1210 13:12:44.758620 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0" Dec 10 13:12:44 crc kubenswrapper[4921]: I1210 13:12:44.844004 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5ccc8479f9-pfmfg"] Dec 10 13:12:44 crc kubenswrapper[4921]: I1210 13:12:44.847110 4921 scope.go:117] "RemoveContainer" containerID="dea0d273c20d455d93c0c3fc20dffcd77ba6eaa6aadc1360d9205a41c8832034" Dec 10 13:12:44 crc kubenswrapper[4921]: I1210 13:12:44.850483 4921 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5ccc8479f9-pfmfg"] Dec 10 13:12:44 crc kubenswrapper[4921]: I1210 13:12:44.856310 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-42vg9"] Dec 10 13:12:44 crc kubenswrapper[4921]: I1210 13:12:44.862374 4921 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-42vg9"] Dec 10 13:12:44 crc kubenswrapper[4921]: I1210 13:12:44.881366 4921 scope.go:117] "RemoveContainer" containerID="9e47fc8744b969a2ea4494894ad9526eadb8e12b2c47da7a3f7916d37316eab5" Dec 10 13:12:44 crc kubenswrapper[4921]: E1210 13:12:44.882429 4921 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9e47fc8744b969a2ea4494894ad9526eadb8e12b2c47da7a3f7916d37316eab5\": container with ID starting with 9e47fc8744b969a2ea4494894ad9526eadb8e12b2c47da7a3f7916d37316eab5 not found: ID does not exist" containerID="9e47fc8744b969a2ea4494894ad9526eadb8e12b2c47da7a3f7916d37316eab5" Dec 10 13:12:44 crc kubenswrapper[4921]: I1210 13:12:44.882474 4921 pod_container_deletor.go:53] 
"DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9e47fc8744b969a2ea4494894ad9526eadb8e12b2c47da7a3f7916d37316eab5"} err="failed to get container status \"9e47fc8744b969a2ea4494894ad9526eadb8e12b2c47da7a3f7916d37316eab5\": rpc error: code = NotFound desc = could not find container \"9e47fc8744b969a2ea4494894ad9526eadb8e12b2c47da7a3f7916d37316eab5\": container with ID starting with 9e47fc8744b969a2ea4494894ad9526eadb8e12b2c47da7a3f7916d37316eab5 not found: ID does not exist" Dec 10 13:12:44 crc kubenswrapper[4921]: I1210 13:12:44.882498 4921 scope.go:117] "RemoveContainer" containerID="dea0d273c20d455d93c0c3fc20dffcd77ba6eaa6aadc1360d9205a41c8832034" Dec 10 13:12:44 crc kubenswrapper[4921]: E1210 13:12:44.882919 4921 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dea0d273c20d455d93c0c3fc20dffcd77ba6eaa6aadc1360d9205a41c8832034\": container with ID starting with dea0d273c20d455d93c0c3fc20dffcd77ba6eaa6aadc1360d9205a41c8832034 not found: ID does not exist" containerID="dea0d273c20d455d93c0c3fc20dffcd77ba6eaa6aadc1360d9205a41c8832034" Dec 10 13:12:44 crc kubenswrapper[4921]: I1210 13:12:44.882973 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dea0d273c20d455d93c0c3fc20dffcd77ba6eaa6aadc1360d9205a41c8832034"} err="failed to get container status \"dea0d273c20d455d93c0c3fc20dffcd77ba6eaa6aadc1360d9205a41c8832034\": rpc error: code = NotFound desc = could not find container \"dea0d273c20d455d93c0c3fc20dffcd77ba6eaa6aadc1360d9205a41c8832034\": container with ID starting with dea0d273c20d455d93c0c3fc20dffcd77ba6eaa6aadc1360d9205a41c8832034 not found: ID does not exist" Dec 10 13:12:44 crc kubenswrapper[4921]: I1210 13:12:44.883002 4921 scope.go:117] "RemoveContainer" containerID="5fef25da163d77225028acb9dd5ff2a00765de1675d522b96b47a0c6a417ed18" Dec 10 13:12:44 crc kubenswrapper[4921]: I1210 13:12:44.904070 4921 scope.go:117] "RemoveContainer" containerID="0dbf00640d21da79fd150c37e75d33ed5e7b63a57611c739ba4bdeb310d85df3" Dec 10 13:12:44 crc kubenswrapper[4921]: I1210 13:12:44.933343 4921 scope.go:117] "RemoveContainer" containerID="5fef25da163d77225028acb9dd5ff2a00765de1675d522b96b47a0c6a417ed18" Dec 10 13:12:44 crc kubenswrapper[4921]: E1210 13:12:44.934333 4921 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5fef25da163d77225028acb9dd5ff2a00765de1675d522b96b47a0c6a417ed18\": container with ID starting with 5fef25da163d77225028acb9dd5ff2a00765de1675d522b96b47a0c6a417ed18 not found: ID does not exist" containerID="5fef25da163d77225028acb9dd5ff2a00765de1675d522b96b47a0c6a417ed18" Dec 10 13:12:44 crc kubenswrapper[4921]: I1210 13:12:44.934363 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5fef25da163d77225028acb9dd5ff2a00765de1675d522b96b47a0c6a417ed18"} err="failed to get container status \"5fef25da163d77225028acb9dd5ff2a00765de1675d522b96b47a0c6a417ed18\": rpc error: code = NotFound desc = could not find container \"5fef25da163d77225028acb9dd5ff2a00765de1675d522b96b47a0c6a417ed18\": container with ID starting with 5fef25da163d77225028acb9dd5ff2a00765de1675d522b96b47a0c6a417ed18 not found: ID does not exist" Dec 10 13:12:44 crc kubenswrapper[4921]: I1210 13:12:44.934382 4921 scope.go:117] "RemoveContainer" containerID="0dbf00640d21da79fd150c37e75d33ed5e7b63a57611c739ba4bdeb310d85df3" Dec 10 13:12:44 crc kubenswrapper[4921]: E1210 
13:12:44.937355 4921 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0dbf00640d21da79fd150c37e75d33ed5e7b63a57611c739ba4bdeb310d85df3\": container with ID starting with 0dbf00640d21da79fd150c37e75d33ed5e7b63a57611c739ba4bdeb310d85df3 not found: ID does not exist" containerID="0dbf00640d21da79fd150c37e75d33ed5e7b63a57611c739ba4bdeb310d85df3" Dec 10 13:12:44 crc kubenswrapper[4921]: I1210 13:12:44.937381 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0dbf00640d21da79fd150c37e75d33ed5e7b63a57611c739ba4bdeb310d85df3"} err="failed to get container status \"0dbf00640d21da79fd150c37e75d33ed5e7b63a57611c739ba4bdeb310d85df3\": rpc error: code = NotFound desc = could not find container \"0dbf00640d21da79fd150c37e75d33ed5e7b63a57611c739ba4bdeb310d85df3\": container with ID starting with 0dbf00640d21da79fd150c37e75d33ed5e7b63a57611c739ba4bdeb310d85df3 not found: ID does not exist" Dec 10 13:12:44 crc kubenswrapper[4921]: I1210 13:12:44.988576 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"] Dec 10 13:12:44 crc kubenswrapper[4921]: E1210 13:12:44.988858 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29ac1ff7-0ecb-4414-be96-ee937ba5bbb7" containerName="dnsmasq-dns" Dec 10 13:12:44 crc kubenswrapper[4921]: I1210 13:12:44.988874 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="29ac1ff7-0ecb-4414-be96-ee937ba5bbb7" containerName="dnsmasq-dns" Dec 10 13:12:44 crc kubenswrapper[4921]: E1210 13:12:44.988890 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cff820e5-df50-4ec5-bd20-4320a33badf1" containerName="dnsmasq-dns" Dec 10 13:12:44 crc kubenswrapper[4921]: I1210 13:12:44.988897 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="cff820e5-df50-4ec5-bd20-4320a33badf1" containerName="dnsmasq-dns" Dec 10 13:12:44 crc kubenswrapper[4921]: E1210 13:12:44.988907 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cff820e5-df50-4ec5-bd20-4320a33badf1" containerName="init" Dec 10 13:12:44 crc kubenswrapper[4921]: I1210 13:12:44.988913 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="cff820e5-df50-4ec5-bd20-4320a33badf1" containerName="init" Dec 10 13:12:44 crc kubenswrapper[4921]: E1210 13:12:44.988935 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29ac1ff7-0ecb-4414-be96-ee937ba5bbb7" containerName="init" Dec 10 13:12:44 crc kubenswrapper[4921]: I1210 13:12:44.988940 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="29ac1ff7-0ecb-4414-be96-ee937ba5bbb7" containerName="init" Dec 10 13:12:44 crc kubenswrapper[4921]: I1210 13:12:44.989082 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="cff820e5-df50-4ec5-bd20-4320a33badf1" containerName="dnsmasq-dns" Dec 10 13:12:44 crc kubenswrapper[4921]: I1210 13:12:44.989104 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="29ac1ff7-0ecb-4414-be96-ee937ba5bbb7" containerName="dnsmasq-dns" Dec 10 13:12:45 crc kubenswrapper[4921]: I1210 13:12:45.000409 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-northd-0" Dec 10 13:12:45 crc kubenswrapper[4921]: I1210 13:12:45.005532 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config" Dec 10 13:12:45 crc kubenswrapper[4921]: I1210 13:12:45.005799 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-rmj7h" Dec 10 13:12:45 crc kubenswrapper[4921]: I1210 13:12:45.005962 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts" Dec 10 13:12:45 crc kubenswrapper[4921]: I1210 13:12:45.010545 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovnnorthd-ovndbs" Dec 10 13:12:45 crc kubenswrapper[4921]: I1210 13:12:45.017741 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Dec 10 13:12:45 crc kubenswrapper[4921]: I1210 13:12:45.112512 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bed38314-496b-47e1-bd8b-32cc8f05678e-config\") pod \"ovn-northd-0\" (UID: \"bed38314-496b-47e1-bd8b-32cc8f05678e\") " pod="openstack/ovn-northd-0" Dec 10 13:12:45 crc kubenswrapper[4921]: I1210 13:12:45.112567 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-797rj\" (UniqueName: \"kubernetes.io/projected/bed38314-496b-47e1-bd8b-32cc8f05678e-kube-api-access-797rj\") pod \"ovn-northd-0\" (UID: \"bed38314-496b-47e1-bd8b-32cc8f05678e\") " pod="openstack/ovn-northd-0" Dec 10 13:12:45 crc kubenswrapper[4921]: I1210 13:12:45.112595 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/bed38314-496b-47e1-bd8b-32cc8f05678e-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"bed38314-496b-47e1-bd8b-32cc8f05678e\") " pod="openstack/ovn-northd-0" Dec 10 13:12:45 crc kubenswrapper[4921]: I1210 13:12:45.112635 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bed38314-496b-47e1-bd8b-32cc8f05678e-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"bed38314-496b-47e1-bd8b-32cc8f05678e\") " pod="openstack/ovn-northd-0" Dec 10 13:12:45 crc kubenswrapper[4921]: I1210 13:12:45.112656 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/bed38314-496b-47e1-bd8b-32cc8f05678e-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"bed38314-496b-47e1-bd8b-32cc8f05678e\") " pod="openstack/ovn-northd-0" Dec 10 13:12:45 crc kubenswrapper[4921]: I1210 13:12:45.112693 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/bed38314-496b-47e1-bd8b-32cc8f05678e-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"bed38314-496b-47e1-bd8b-32cc8f05678e\") " pod="openstack/ovn-northd-0" Dec 10 13:12:45 crc kubenswrapper[4921]: I1210 13:12:45.112721 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/bed38314-496b-47e1-bd8b-32cc8f05678e-scripts\") pod \"ovn-northd-0\" (UID: \"bed38314-496b-47e1-bd8b-32cc8f05678e\") " pod="openstack/ovn-northd-0" Dec 10 13:12:45 crc kubenswrapper[4921]: 
I1210 13:12:45.205227 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="29ac1ff7-0ecb-4414-be96-ee937ba5bbb7" path="/var/lib/kubelet/pods/29ac1ff7-0ecb-4414-be96-ee937ba5bbb7/volumes" Dec 10 13:12:45 crc kubenswrapper[4921]: I1210 13:12:45.205860 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cff820e5-df50-4ec5-bd20-4320a33badf1" path="/var/lib/kubelet/pods/cff820e5-df50-4ec5-bd20-4320a33badf1/volumes" Dec 10 13:12:45 crc kubenswrapper[4921]: I1210 13:12:45.214182 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bed38314-496b-47e1-bd8b-32cc8f05678e-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"bed38314-496b-47e1-bd8b-32cc8f05678e\") " pod="openstack/ovn-northd-0" Dec 10 13:12:45 crc kubenswrapper[4921]: I1210 13:12:45.214234 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/bed38314-496b-47e1-bd8b-32cc8f05678e-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"bed38314-496b-47e1-bd8b-32cc8f05678e\") " pod="openstack/ovn-northd-0" Dec 10 13:12:45 crc kubenswrapper[4921]: I1210 13:12:45.214276 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/bed38314-496b-47e1-bd8b-32cc8f05678e-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"bed38314-496b-47e1-bd8b-32cc8f05678e\") " pod="openstack/ovn-northd-0" Dec 10 13:12:45 crc kubenswrapper[4921]: I1210 13:12:45.214308 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/bed38314-496b-47e1-bd8b-32cc8f05678e-scripts\") pod \"ovn-northd-0\" (UID: \"bed38314-496b-47e1-bd8b-32cc8f05678e\") " pod="openstack/ovn-northd-0" Dec 10 13:12:45 crc kubenswrapper[4921]: I1210 13:12:45.214356 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bed38314-496b-47e1-bd8b-32cc8f05678e-config\") pod \"ovn-northd-0\" (UID: \"bed38314-496b-47e1-bd8b-32cc8f05678e\") " pod="openstack/ovn-northd-0" Dec 10 13:12:45 crc kubenswrapper[4921]: I1210 13:12:45.214372 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-797rj\" (UniqueName: \"kubernetes.io/projected/bed38314-496b-47e1-bd8b-32cc8f05678e-kube-api-access-797rj\") pod \"ovn-northd-0\" (UID: \"bed38314-496b-47e1-bd8b-32cc8f05678e\") " pod="openstack/ovn-northd-0" Dec 10 13:12:45 crc kubenswrapper[4921]: I1210 13:12:45.214411 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/bed38314-496b-47e1-bd8b-32cc8f05678e-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"bed38314-496b-47e1-bd8b-32cc8f05678e\") " pod="openstack/ovn-northd-0" Dec 10 13:12:45 crc kubenswrapper[4921]: I1210 13:12:45.214740 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/bed38314-496b-47e1-bd8b-32cc8f05678e-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"bed38314-496b-47e1-bd8b-32cc8f05678e\") " pod="openstack/ovn-northd-0" Dec 10 13:12:45 crc kubenswrapper[4921]: I1210 13:12:45.215343 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bed38314-496b-47e1-bd8b-32cc8f05678e-config\") pod 
\"ovn-northd-0\" (UID: \"bed38314-496b-47e1-bd8b-32cc8f05678e\") " pod="openstack/ovn-northd-0" Dec 10 13:12:45 crc kubenswrapper[4921]: I1210 13:12:45.215343 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/bed38314-496b-47e1-bd8b-32cc8f05678e-scripts\") pod \"ovn-northd-0\" (UID: \"bed38314-496b-47e1-bd8b-32cc8f05678e\") " pod="openstack/ovn-northd-0" Dec 10 13:12:45 crc kubenswrapper[4921]: I1210 13:12:45.221935 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/bed38314-496b-47e1-bd8b-32cc8f05678e-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"bed38314-496b-47e1-bd8b-32cc8f05678e\") " pod="openstack/ovn-northd-0" Dec 10 13:12:45 crc kubenswrapper[4921]: I1210 13:12:45.223290 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bed38314-496b-47e1-bd8b-32cc8f05678e-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"bed38314-496b-47e1-bd8b-32cc8f05678e\") " pod="openstack/ovn-northd-0" Dec 10 13:12:45 crc kubenswrapper[4921]: I1210 13:12:45.228148 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/bed38314-496b-47e1-bd8b-32cc8f05678e-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"bed38314-496b-47e1-bd8b-32cc8f05678e\") " pod="openstack/ovn-northd-0" Dec 10 13:12:45 crc kubenswrapper[4921]: I1210 13:12:45.239247 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-797rj\" (UniqueName: \"kubernetes.io/projected/bed38314-496b-47e1-bd8b-32cc8f05678e-kube-api-access-797rj\") pod \"ovn-northd-0\" (UID: \"bed38314-496b-47e1-bd8b-32cc8f05678e\") " pod="openstack/ovn-northd-0" Dec 10 13:12:45 crc kubenswrapper[4921]: I1210 13:12:45.368292 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-northd-0" Dec 10 13:12:45 crc kubenswrapper[4921]: I1210 13:12:45.679882 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bf47b49b7-b8bwc" event={"ID":"b72dcc76-4198-4feb-a942-6a9073765a8e","Type":"ContainerStarted","Data":"c8fddc61acafcb73d26a1c868452b67c0d32820ebf766e3b9afec7c5719f6eb4"} Dec 10 13:12:45 crc kubenswrapper[4921]: I1210 13:12:45.679990 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5bf47b49b7-b8bwc" Dec 10 13:12:45 crc kubenswrapper[4921]: I1210 13:12:45.681574 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-ns77l" event={"ID":"f88eabcd-9eab-4760-b3b7-3d07479f164c","Type":"ContainerStarted","Data":"1fc9773bac597bbf2a49c255dcb232febc0db6995f6601c2a733b9c4fddc9557"} Dec 10 13:12:45 crc kubenswrapper[4921]: I1210 13:12:45.704362 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5bf47b49b7-b8bwc" podStartSLOduration=3.70434155 podStartE2EDuration="3.70434155s" podCreationTimestamp="2025-12-10 13:12:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 13:12:45.697999349 +0000 UTC m=+962.914221273" watchObservedRunningTime="2025-12-10 13:12:45.70434155 +0000 UTC m=+962.920563474" Dec 10 13:12:45 crc kubenswrapper[4921]: I1210 13:12:45.722298 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-8554648995-ns77l" podStartSLOduration=2.7222808609999998 podStartE2EDuration="2.722280861s" podCreationTimestamp="2025-12-10 13:12:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 13:12:45.718436648 +0000 UTC m=+962.934658572" watchObservedRunningTime="2025-12-10 13:12:45.722280861 +0000 UTC m=+962.938502785" Dec 10 13:12:46 crc kubenswrapper[4921]: I1210 13:12:46.257616 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Dec 10 13:12:46 crc kubenswrapper[4921]: I1210 13:12:46.689177 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"bed38314-496b-47e1-bd8b-32cc8f05678e","Type":"ContainerStarted","Data":"73f319b6f5358e26900f4e96e1b4a5c873cd463e640d066e9af384d6f60c9a2b"} Dec 10 13:12:46 crc kubenswrapper[4921]: I1210 13:12:46.691098 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"4f843ca5-c8e0-4c44-a626-3cb41c83bab3","Type":"ContainerStarted","Data":"d73e4e8f32e5154f17997f872f9655aaaccfd134bbb7b726acf098a2a36e35ba"} Dec 10 13:12:46 crc kubenswrapper[4921]: I1210 13:12:46.692028 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-8554648995-ns77l" Dec 10 13:12:46 crc kubenswrapper[4921]: I1210 13:12:46.711583 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=2.720255525 podStartE2EDuration="1m1.711559908s" podCreationTimestamp="2025-12-10 13:11:45 +0000 UTC" firstStartedPulling="2025-12-10 13:11:46.866601337 +0000 UTC m=+904.082823261" lastFinishedPulling="2025-12-10 13:12:45.85790572 +0000 UTC m=+963.074127644" observedRunningTime="2025-12-10 13:12:46.704286143 +0000 UTC m=+963.920508067" watchObservedRunningTime="2025-12-10 13:12:46.711559908 +0000 UTC m=+963.927781842" Dec 10 
13:12:47 crc kubenswrapper[4921]: I1210 13:12:47.871877 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-67sv6" Dec 10 13:12:47 crc kubenswrapper[4921]: I1210 13:12:47.872489 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-67sv6" Dec 10 13:12:47 crc kubenswrapper[4921]: I1210 13:12:47.922114 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-67sv6" Dec 10 13:12:48 crc kubenswrapper[4921]: I1210 13:12:48.706447 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"bed38314-496b-47e1-bd8b-32cc8f05678e","Type":"ContainerStarted","Data":"a677dd9b18daa7cf8d6b75fe626564bef1d266ca7d346ac78b93a5f4b7d37070"} Dec 10 13:12:48 crc kubenswrapper[4921]: I1210 13:12:48.706487 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"bed38314-496b-47e1-bd8b-32cc8f05678e","Type":"ContainerStarted","Data":"bfe47381014b1617402fc1cf34ec1639600d7fc9c569e416f4263f1fa2557b35"} Dec 10 13:12:48 crc kubenswrapper[4921]: I1210 13:12:48.737348 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=3.423006829 podStartE2EDuration="4.737328269s" podCreationTimestamp="2025-12-10 13:12:44 +0000 UTC" firstStartedPulling="2025-12-10 13:12:46.267708767 +0000 UTC m=+963.483930681" lastFinishedPulling="2025-12-10 13:12:47.582030207 +0000 UTC m=+964.798252121" observedRunningTime="2025-12-10 13:12:48.72359271 +0000 UTC m=+965.939814634" watchObservedRunningTime="2025-12-10 13:12:48.737328269 +0000 UTC m=+965.953550193" Dec 10 13:12:48 crc kubenswrapper[4921]: I1210 13:12:48.756490 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-67sv6" Dec 10 13:12:49 crc kubenswrapper[4921]: I1210 13:12:49.155096 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-67sv6"] Dec 10 13:12:49 crc kubenswrapper[4921]: I1210 13:12:49.714178 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0" Dec 10 13:12:50 crc kubenswrapper[4921]: I1210 13:12:50.157073 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0" Dec 10 13:12:50 crc kubenswrapper[4921]: I1210 13:12:50.253750 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0" Dec 10 13:12:50 crc kubenswrapper[4921]: I1210 13:12:50.721510 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-67sv6" podUID="ecde3911-ea57-498b-a373-8389ccc0696c" containerName="registry-server" containerID="cri-o://f21a53403c5863dc102f525d67cae424fc38b935a577cff27421e0e024f0ca7b" gracePeriod=2 Dec 10 13:12:51 crc kubenswrapper[4921]: I1210 13:12:51.101161 4921 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-67sv6" Dec 10 13:12:51 crc kubenswrapper[4921]: I1210 13:12:51.215100 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ecde3911-ea57-498b-a373-8389ccc0696c-utilities\") pod \"ecde3911-ea57-498b-a373-8389ccc0696c\" (UID: \"ecde3911-ea57-498b-a373-8389ccc0696c\") " Dec 10 13:12:51 crc kubenswrapper[4921]: I1210 13:12:51.215343 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zsgwh\" (UniqueName: \"kubernetes.io/projected/ecde3911-ea57-498b-a373-8389ccc0696c-kube-api-access-zsgwh\") pod \"ecde3911-ea57-498b-a373-8389ccc0696c\" (UID: \"ecde3911-ea57-498b-a373-8389ccc0696c\") " Dec 10 13:12:51 crc kubenswrapper[4921]: I1210 13:12:51.215400 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ecde3911-ea57-498b-a373-8389ccc0696c-catalog-content\") pod \"ecde3911-ea57-498b-a373-8389ccc0696c\" (UID: \"ecde3911-ea57-498b-a373-8389ccc0696c\") " Dec 10 13:12:51 crc kubenswrapper[4921]: I1210 13:12:51.216248 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ecde3911-ea57-498b-a373-8389ccc0696c-utilities" (OuterVolumeSpecName: "utilities") pod "ecde3911-ea57-498b-a373-8389ccc0696c" (UID: "ecde3911-ea57-498b-a373-8389ccc0696c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 13:12:51 crc kubenswrapper[4921]: I1210 13:12:51.229777 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ecde3911-ea57-498b-a373-8389ccc0696c-kube-api-access-zsgwh" (OuterVolumeSpecName: "kube-api-access-zsgwh") pod "ecde3911-ea57-498b-a373-8389ccc0696c" (UID: "ecde3911-ea57-498b-a373-8389ccc0696c"). InnerVolumeSpecName "kube-api-access-zsgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 13:12:51 crc kubenswrapper[4921]: I1210 13:12:51.242103 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ecde3911-ea57-498b-a373-8389ccc0696c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ecde3911-ea57-498b-a373-8389ccc0696c" (UID: "ecde3911-ea57-498b-a373-8389ccc0696c"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 13:12:51 crc kubenswrapper[4921]: I1210 13:12:51.317505 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zsgwh\" (UniqueName: \"kubernetes.io/projected/ecde3911-ea57-498b-a373-8389ccc0696c-kube-api-access-zsgwh\") on node \"crc\" DevicePath \"\"" Dec 10 13:12:51 crc kubenswrapper[4921]: I1210 13:12:51.317539 4921 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ecde3911-ea57-498b-a373-8389ccc0696c-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 10 13:12:51 crc kubenswrapper[4921]: I1210 13:12:51.317552 4921 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ecde3911-ea57-498b-a373-8389ccc0696c-utilities\") on node \"crc\" DevicePath \"\"" Dec 10 13:12:51 crc kubenswrapper[4921]: I1210 13:12:51.732683 4921 generic.go:334] "Generic (PLEG): container finished" podID="ecde3911-ea57-498b-a373-8389ccc0696c" containerID="f21a53403c5863dc102f525d67cae424fc38b935a577cff27421e0e024f0ca7b" exitCode=0 Dec 10 13:12:51 crc kubenswrapper[4921]: I1210 13:12:51.732737 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-67sv6" event={"ID":"ecde3911-ea57-498b-a373-8389ccc0696c","Type":"ContainerDied","Data":"f21a53403c5863dc102f525d67cae424fc38b935a577cff27421e0e024f0ca7b"} Dec 10 13:12:51 crc kubenswrapper[4921]: I1210 13:12:51.732767 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-67sv6" event={"ID":"ecde3911-ea57-498b-a373-8389ccc0696c","Type":"ContainerDied","Data":"15658914fef3a7f503df0185db0628f8b1ea7fbb393a1a1b79e02f7b026d885f"} Dec 10 13:12:51 crc kubenswrapper[4921]: I1210 13:12:51.732788 4921 scope.go:117] "RemoveContainer" containerID="f21a53403c5863dc102f525d67cae424fc38b935a577cff27421e0e024f0ca7b" Dec 10 13:12:51 crc kubenswrapper[4921]: I1210 13:12:51.732945 4921 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-67sv6" Dec 10 13:12:51 crc kubenswrapper[4921]: I1210 13:12:51.759106 4921 scope.go:117] "RemoveContainer" containerID="a3b91dd196be9d2b3f2fb44ab22145ad2b83c059fb06943eefda22237a7d9c2e" Dec 10 13:12:51 crc kubenswrapper[4921]: I1210 13:12:51.765473 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-67sv6"] Dec 10 13:12:51 crc kubenswrapper[4921]: I1210 13:12:51.780941 4921 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-67sv6"] Dec 10 13:12:51 crc kubenswrapper[4921]: I1210 13:12:51.796596 4921 scope.go:117] "RemoveContainer" containerID="94bfa87325d0e625218b5c421fc2daa5eebb59f35a43c984442a046410b39a06" Dec 10 13:12:51 crc kubenswrapper[4921]: I1210 13:12:51.825721 4921 scope.go:117] "RemoveContainer" containerID="f21a53403c5863dc102f525d67cae424fc38b935a577cff27421e0e024f0ca7b" Dec 10 13:12:51 crc kubenswrapper[4921]: E1210 13:12:51.826208 4921 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f21a53403c5863dc102f525d67cae424fc38b935a577cff27421e0e024f0ca7b\": container with ID starting with f21a53403c5863dc102f525d67cae424fc38b935a577cff27421e0e024f0ca7b not found: ID does not exist" containerID="f21a53403c5863dc102f525d67cae424fc38b935a577cff27421e0e024f0ca7b" Dec 10 13:12:51 crc kubenswrapper[4921]: I1210 13:12:51.826240 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f21a53403c5863dc102f525d67cae424fc38b935a577cff27421e0e024f0ca7b"} err="failed to get container status \"f21a53403c5863dc102f525d67cae424fc38b935a577cff27421e0e024f0ca7b\": rpc error: code = NotFound desc = could not find container \"f21a53403c5863dc102f525d67cae424fc38b935a577cff27421e0e024f0ca7b\": container with ID starting with f21a53403c5863dc102f525d67cae424fc38b935a577cff27421e0e024f0ca7b not found: ID does not exist" Dec 10 13:12:51 crc kubenswrapper[4921]: I1210 13:12:51.826289 4921 scope.go:117] "RemoveContainer" containerID="a3b91dd196be9d2b3f2fb44ab22145ad2b83c059fb06943eefda22237a7d9c2e" Dec 10 13:12:51 crc kubenswrapper[4921]: E1210 13:12:51.826789 4921 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a3b91dd196be9d2b3f2fb44ab22145ad2b83c059fb06943eefda22237a7d9c2e\": container with ID starting with a3b91dd196be9d2b3f2fb44ab22145ad2b83c059fb06943eefda22237a7d9c2e not found: ID does not exist" containerID="a3b91dd196be9d2b3f2fb44ab22145ad2b83c059fb06943eefda22237a7d9c2e" Dec 10 13:12:51 crc kubenswrapper[4921]: I1210 13:12:51.826919 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a3b91dd196be9d2b3f2fb44ab22145ad2b83c059fb06943eefda22237a7d9c2e"} err="failed to get container status \"a3b91dd196be9d2b3f2fb44ab22145ad2b83c059fb06943eefda22237a7d9c2e\": rpc error: code = NotFound desc = could not find container \"a3b91dd196be9d2b3f2fb44ab22145ad2b83c059fb06943eefda22237a7d9c2e\": container with ID starting with a3b91dd196be9d2b3f2fb44ab22145ad2b83c059fb06943eefda22237a7d9c2e not found: ID does not exist" Dec 10 13:12:51 crc kubenswrapper[4921]: I1210 13:12:51.827103 4921 scope.go:117] "RemoveContainer" containerID="94bfa87325d0e625218b5c421fc2daa5eebb59f35a43c984442a046410b39a06" Dec 10 13:12:51 crc kubenswrapper[4921]: E1210 13:12:51.827768 4921 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"94bfa87325d0e625218b5c421fc2daa5eebb59f35a43c984442a046410b39a06\": container with ID starting with 94bfa87325d0e625218b5c421fc2daa5eebb59f35a43c984442a046410b39a06 not found: ID does not exist" containerID="94bfa87325d0e625218b5c421fc2daa5eebb59f35a43c984442a046410b39a06" Dec 10 13:12:51 crc kubenswrapper[4921]: I1210 13:12:51.827803 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"94bfa87325d0e625218b5c421fc2daa5eebb59f35a43c984442a046410b39a06"} err="failed to get container status \"94bfa87325d0e625218b5c421fc2daa5eebb59f35a43c984442a046410b39a06\": rpc error: code = NotFound desc = could not find container \"94bfa87325d0e625218b5c421fc2daa5eebb59f35a43c984442a046410b39a06\": container with ID starting with 94bfa87325d0e625218b5c421fc2daa5eebb59f35a43c984442a046410b39a06 not found: ID does not exist" Dec 10 13:12:52 crc kubenswrapper[4921]: I1210 13:12:52.653666 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0" Dec 10 13:12:52 crc kubenswrapper[4921]: I1210 13:12:52.653744 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0" Dec 10 13:12:52 crc kubenswrapper[4921]: I1210 13:12:52.750968 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0" Dec 10 13:12:52 crc kubenswrapper[4921]: I1210 13:12:52.829693 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0" Dec 10 13:12:53 crc kubenswrapper[4921]: I1210 13:12:53.202971 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ecde3911-ea57-498b-a373-8389ccc0696c" path="/var/lib/kubelet/pods/ecde3911-ea57-498b-a373-8389ccc0696c/volumes" Dec 10 13:12:53 crc kubenswrapper[4921]: I1210 13:12:53.254993 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5bf47b49b7-b8bwc" Dec 10 13:12:53 crc kubenswrapper[4921]: I1210 13:12:53.547178 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-8554648995-ns77l" Dec 10 13:12:53 crc kubenswrapper[4921]: I1210 13:12:53.604232 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5bf47b49b7-b8bwc"] Dec 10 13:12:53 crc kubenswrapper[4921]: I1210 13:12:53.747928 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5bf47b49b7-b8bwc" podUID="b72dcc76-4198-4feb-a942-6a9073765a8e" containerName="dnsmasq-dns" containerID="cri-o://c8fddc61acafcb73d26a1c868452b67c0d32820ebf766e3b9afec7c5719f6eb4" gracePeriod=10 Dec 10 13:12:53 crc kubenswrapper[4921]: I1210 13:12:53.833023 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-f7n4f"] Dec 10 13:12:53 crc kubenswrapper[4921]: E1210 13:12:53.833324 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ecde3911-ea57-498b-a373-8389ccc0696c" containerName="extract-utilities" Dec 10 13:12:53 crc kubenswrapper[4921]: I1210 13:12:53.833336 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="ecde3911-ea57-498b-a373-8389ccc0696c" containerName="extract-utilities" Dec 10 13:12:53 crc kubenswrapper[4921]: E1210 13:12:53.833345 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ecde3911-ea57-498b-a373-8389ccc0696c" containerName="extract-content" Dec 10 13:12:53 crc 
kubenswrapper[4921]: I1210 13:12:53.833351 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="ecde3911-ea57-498b-a373-8389ccc0696c" containerName="extract-content" Dec 10 13:12:53 crc kubenswrapper[4921]: E1210 13:12:53.833376 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ecde3911-ea57-498b-a373-8389ccc0696c" containerName="registry-server" Dec 10 13:12:53 crc kubenswrapper[4921]: I1210 13:12:53.833382 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="ecde3911-ea57-498b-a373-8389ccc0696c" containerName="registry-server" Dec 10 13:12:53 crc kubenswrapper[4921]: I1210 13:12:53.833531 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="ecde3911-ea57-498b-a373-8389ccc0696c" containerName="registry-server" Dec 10 13:12:53 crc kubenswrapper[4921]: I1210 13:12:53.834024 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-f7n4f" Dec 10 13:12:53 crc kubenswrapper[4921]: I1210 13:12:53.851912 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-f7n4f"] Dec 10 13:12:53 crc kubenswrapper[4921]: I1210 13:12:53.946584 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-9f3a-account-create-update-fl26p"] Dec 10 13:12:53 crc kubenswrapper[4921]: I1210 13:12:53.950183 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-9f3a-account-create-update-fl26p" Dec 10 13:12:53 crc kubenswrapper[4921]: I1210 13:12:53.951821 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Dec 10 13:12:53 crc kubenswrapper[4921]: I1210 13:12:53.972677 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2082e853-50b5-45c4-ba0a-d27cdd8e702a-operator-scripts\") pod \"keystone-db-create-f7n4f\" (UID: \"2082e853-50b5-45c4-ba0a-d27cdd8e702a\") " pod="openstack/keystone-db-create-f7n4f" Dec 10 13:12:53 crc kubenswrapper[4921]: I1210 13:12:53.972734 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h6n88\" (UniqueName: \"kubernetes.io/projected/2082e853-50b5-45c4-ba0a-d27cdd8e702a-kube-api-access-h6n88\") pod \"keystone-db-create-f7n4f\" (UID: \"2082e853-50b5-45c4-ba0a-d27cdd8e702a\") " pod="openstack/keystone-db-create-f7n4f" Dec 10 13:12:53 crc kubenswrapper[4921]: I1210 13:12:53.979956 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-9f3a-account-create-update-fl26p"] Dec 10 13:12:54 crc kubenswrapper[4921]: I1210 13:12:54.075044 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f6179226-7f67-4fad-95fd-7542f4abdcff-operator-scripts\") pod \"keystone-9f3a-account-create-update-fl26p\" (UID: \"f6179226-7f67-4fad-95fd-7542f4abdcff\") " pod="openstack/keystone-9f3a-account-create-update-fl26p" Dec 10 13:12:54 crc kubenswrapper[4921]: I1210 13:12:54.075325 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2082e853-50b5-45c4-ba0a-d27cdd8e702a-operator-scripts\") pod \"keystone-db-create-f7n4f\" (UID: \"2082e853-50b5-45c4-ba0a-d27cdd8e702a\") " pod="openstack/keystone-db-create-f7n4f" Dec 10 13:12:54 crc kubenswrapper[4921]: I1210 13:12:54.075465 4921 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h6n88\" (UniqueName: \"kubernetes.io/projected/2082e853-50b5-45c4-ba0a-d27cdd8e702a-kube-api-access-h6n88\") pod \"keystone-db-create-f7n4f\" (UID: \"2082e853-50b5-45c4-ba0a-d27cdd8e702a\") " pod="openstack/keystone-db-create-f7n4f" Dec 10 13:12:54 crc kubenswrapper[4921]: I1210 13:12:54.075509 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4d5b7\" (UniqueName: \"kubernetes.io/projected/f6179226-7f67-4fad-95fd-7542f4abdcff-kube-api-access-4d5b7\") pod \"keystone-9f3a-account-create-update-fl26p\" (UID: \"f6179226-7f67-4fad-95fd-7542f4abdcff\") " pod="openstack/keystone-9f3a-account-create-update-fl26p" Dec 10 13:12:54 crc kubenswrapper[4921]: I1210 13:12:54.076202 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2082e853-50b5-45c4-ba0a-d27cdd8e702a-operator-scripts\") pod \"keystone-db-create-f7n4f\" (UID: \"2082e853-50b5-45c4-ba0a-d27cdd8e702a\") " pod="openstack/keystone-db-create-f7n4f" Dec 10 13:12:54 crc kubenswrapper[4921]: I1210 13:12:54.100111 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h6n88\" (UniqueName: \"kubernetes.io/projected/2082e853-50b5-45c4-ba0a-d27cdd8e702a-kube-api-access-h6n88\") pod \"keystone-db-create-f7n4f\" (UID: \"2082e853-50b5-45c4-ba0a-d27cdd8e702a\") " pod="openstack/keystone-db-create-f7n4f" Dec 10 13:12:54 crc kubenswrapper[4921]: I1210 13:12:54.180467 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-lgf2r"] Dec 10 13:12:54 crc kubenswrapper[4921]: I1210 13:12:54.181367 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4d5b7\" (UniqueName: \"kubernetes.io/projected/f6179226-7f67-4fad-95fd-7542f4abdcff-kube-api-access-4d5b7\") pod \"keystone-9f3a-account-create-update-fl26p\" (UID: \"f6179226-7f67-4fad-95fd-7542f4abdcff\") " pod="openstack/keystone-9f3a-account-create-update-fl26p" Dec 10 13:12:54 crc kubenswrapper[4921]: I1210 13:12:54.181486 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-create-lgf2r" Dec 10 13:12:54 crc kubenswrapper[4921]: I1210 13:12:54.181501 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f6179226-7f67-4fad-95fd-7542f4abdcff-operator-scripts\") pod \"keystone-9f3a-account-create-update-fl26p\" (UID: \"f6179226-7f67-4fad-95fd-7542f4abdcff\") " pod="openstack/keystone-9f3a-account-create-update-fl26p" Dec 10 13:12:54 crc kubenswrapper[4921]: I1210 13:12:54.182182 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f6179226-7f67-4fad-95fd-7542f4abdcff-operator-scripts\") pod \"keystone-9f3a-account-create-update-fl26p\" (UID: \"f6179226-7f67-4fad-95fd-7542f4abdcff\") " pod="openstack/keystone-9f3a-account-create-update-fl26p" Dec 10 13:12:54 crc kubenswrapper[4921]: I1210 13:12:54.191495 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-lgf2r"] Dec 10 13:12:54 crc kubenswrapper[4921]: I1210 13:12:54.214870 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-f6ee-account-create-update-w9mnd"] Dec 10 13:12:54 crc kubenswrapper[4921]: I1210 13:12:54.217732 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-f6ee-account-create-update-w9mnd" Dec 10 13:12:54 crc kubenswrapper[4921]: I1210 13:12:54.223004 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret" Dec 10 13:12:54 crc kubenswrapper[4921]: I1210 13:12:54.233337 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-f6ee-account-create-update-w9mnd"] Dec 10 13:12:54 crc kubenswrapper[4921]: I1210 13:12:54.247418 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4d5b7\" (UniqueName: \"kubernetes.io/projected/f6179226-7f67-4fad-95fd-7542f4abdcff-kube-api-access-4d5b7\") pod \"keystone-9f3a-account-create-update-fl26p\" (UID: \"f6179226-7f67-4fad-95fd-7542f4abdcff\") " pod="openstack/keystone-9f3a-account-create-update-fl26p" Dec 10 13:12:54 crc kubenswrapper[4921]: I1210 13:12:54.261999 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-f7n4f" Dec 10 13:12:54 crc kubenswrapper[4921]: I1210 13:12:54.278900 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-9f3a-account-create-update-fl26p" Dec 10 13:12:54 crc kubenswrapper[4921]: I1210 13:12:54.291653 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p27t2\" (UniqueName: \"kubernetes.io/projected/39c24196-2136-47b7-9aa5-372cf1de38db-kube-api-access-p27t2\") pod \"placement-f6ee-account-create-update-w9mnd\" (UID: \"39c24196-2136-47b7-9aa5-372cf1de38db\") " pod="openstack/placement-f6ee-account-create-update-w9mnd" Dec 10 13:12:54 crc kubenswrapper[4921]: I1210 13:12:54.291805 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/39c24196-2136-47b7-9aa5-372cf1de38db-operator-scripts\") pod \"placement-f6ee-account-create-update-w9mnd\" (UID: \"39c24196-2136-47b7-9aa5-372cf1de38db\") " pod="openstack/placement-f6ee-account-create-update-w9mnd" Dec 10 13:12:54 crc kubenswrapper[4921]: I1210 13:12:54.291833 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/880b1430-d97c-499e-983f-6ec48060e96b-operator-scripts\") pod \"placement-db-create-lgf2r\" (UID: \"880b1430-d97c-499e-983f-6ec48060e96b\") " pod="openstack/placement-db-create-lgf2r" Dec 10 13:12:54 crc kubenswrapper[4921]: I1210 13:12:54.291863 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4b89r\" (UniqueName: \"kubernetes.io/projected/880b1430-d97c-499e-983f-6ec48060e96b-kube-api-access-4b89r\") pod \"placement-db-create-lgf2r\" (UID: \"880b1430-d97c-499e-983f-6ec48060e96b\") " pod="openstack/placement-db-create-lgf2r" Dec 10 13:12:54 crc kubenswrapper[4921]: I1210 13:12:54.335310 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5bf47b49b7-b8bwc" Dec 10 13:12:54 crc kubenswrapper[4921]: I1210 13:12:54.370347 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-k8zlg"] Dec 10 13:12:54 crc kubenswrapper[4921]: E1210 13:12:54.370887 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b72dcc76-4198-4feb-a942-6a9073765a8e" containerName="init" Dec 10 13:12:54 crc kubenswrapper[4921]: I1210 13:12:54.370956 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="b72dcc76-4198-4feb-a942-6a9073765a8e" containerName="init" Dec 10 13:12:54 crc kubenswrapper[4921]: E1210 13:12:54.371026 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b72dcc76-4198-4feb-a942-6a9073765a8e" containerName="dnsmasq-dns" Dec 10 13:12:54 crc kubenswrapper[4921]: I1210 13:12:54.371082 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="b72dcc76-4198-4feb-a942-6a9073765a8e" containerName="dnsmasq-dns" Dec 10 13:12:54 crc kubenswrapper[4921]: I1210 13:12:54.371286 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="b72dcc76-4198-4feb-a942-6a9073765a8e" containerName="dnsmasq-dns" Dec 10 13:12:54 crc kubenswrapper[4921]: I1210 13:12:54.372034 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-k8zlg" Dec 10 13:12:54 crc kubenswrapper[4921]: I1210 13:12:54.390459 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-k8zlg"] Dec 10 13:12:54 crc kubenswrapper[4921]: I1210 13:12:54.393224 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p27t2\" (UniqueName: \"kubernetes.io/projected/39c24196-2136-47b7-9aa5-372cf1de38db-kube-api-access-p27t2\") pod \"placement-f6ee-account-create-update-w9mnd\" (UID: \"39c24196-2136-47b7-9aa5-372cf1de38db\") " pod="openstack/placement-f6ee-account-create-update-w9mnd" Dec 10 13:12:54 crc kubenswrapper[4921]: I1210 13:12:54.393488 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/39c24196-2136-47b7-9aa5-372cf1de38db-operator-scripts\") pod \"placement-f6ee-account-create-update-w9mnd\" (UID: \"39c24196-2136-47b7-9aa5-372cf1de38db\") " pod="openstack/placement-f6ee-account-create-update-w9mnd" Dec 10 13:12:54 crc kubenswrapper[4921]: I1210 13:12:54.394464 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/880b1430-d97c-499e-983f-6ec48060e96b-operator-scripts\") pod \"placement-db-create-lgf2r\" (UID: \"880b1430-d97c-499e-983f-6ec48060e96b\") " pod="openstack/placement-db-create-lgf2r" Dec 10 13:12:54 crc kubenswrapper[4921]: I1210 13:12:54.394621 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4b89r\" (UniqueName: \"kubernetes.io/projected/880b1430-d97c-499e-983f-6ec48060e96b-kube-api-access-4b89r\") pod \"placement-db-create-lgf2r\" (UID: \"880b1430-d97c-499e-983f-6ec48060e96b\") " pod="openstack/placement-db-create-lgf2r" Dec 10 13:12:54 crc kubenswrapper[4921]: I1210 13:12:54.398272 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/880b1430-d97c-499e-983f-6ec48060e96b-operator-scripts\") pod \"placement-db-create-lgf2r\" (UID: \"880b1430-d97c-499e-983f-6ec48060e96b\") " pod="openstack/placement-db-create-lgf2r" Dec 10 13:12:54 crc kubenswrapper[4921]: I1210 13:12:54.398877 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/39c24196-2136-47b7-9aa5-372cf1de38db-operator-scripts\") pod \"placement-f6ee-account-create-update-w9mnd\" (UID: \"39c24196-2136-47b7-9aa5-372cf1de38db\") " pod="openstack/placement-f6ee-account-create-update-w9mnd" Dec 10 13:12:54 crc kubenswrapper[4921]: I1210 13:12:54.434434 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p27t2\" (UniqueName: \"kubernetes.io/projected/39c24196-2136-47b7-9aa5-372cf1de38db-kube-api-access-p27t2\") pod \"placement-f6ee-account-create-update-w9mnd\" (UID: \"39c24196-2136-47b7-9aa5-372cf1de38db\") " pod="openstack/placement-f6ee-account-create-update-w9mnd" Dec 10 13:12:54 crc kubenswrapper[4921]: I1210 13:12:54.436007 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4b89r\" (UniqueName: \"kubernetes.io/projected/880b1430-d97c-499e-983f-6ec48060e96b-kube-api-access-4b89r\") pod \"placement-db-create-lgf2r\" (UID: \"880b1430-d97c-499e-983f-6ec48060e96b\") " pod="openstack/placement-db-create-lgf2r" Dec 10 13:12:54 crc kubenswrapper[4921]: I1210 13:12:54.497623 4921 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b72dcc76-4198-4feb-a942-6a9073765a8e-dns-svc\") pod \"b72dcc76-4198-4feb-a942-6a9073765a8e\" (UID: \"b72dcc76-4198-4feb-a942-6a9073765a8e\") " Dec 10 13:12:54 crc kubenswrapper[4921]: I1210 13:12:54.498422 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-6cc7-account-create-update-96qr2"] Dec 10 13:12:54 crc kubenswrapper[4921]: I1210 13:12:54.499883 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-6cc7-account-create-update-96qr2" Dec 10 13:12:54 crc kubenswrapper[4921]: I1210 13:12:54.501569 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxcxh\" (UniqueName: \"kubernetes.io/projected/b72dcc76-4198-4feb-a942-6a9073765a8e-kube-api-access-wxcxh\") pod \"b72dcc76-4198-4feb-a942-6a9073765a8e\" (UID: \"b72dcc76-4198-4feb-a942-6a9073765a8e\") " Dec 10 13:12:54 crc kubenswrapper[4921]: I1210 13:12:54.504154 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b72dcc76-4198-4feb-a942-6a9073765a8e-config\") pod \"b72dcc76-4198-4feb-a942-6a9073765a8e\" (UID: \"b72dcc76-4198-4feb-a942-6a9073765a8e\") " Dec 10 13:12:54 crc kubenswrapper[4921]: I1210 13:12:54.504842 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b72dcc76-4198-4feb-a942-6a9073765a8e-ovsdbserver-nb\") pod \"b72dcc76-4198-4feb-a942-6a9073765a8e\" (UID: \"b72dcc76-4198-4feb-a942-6a9073765a8e\") " Dec 10 13:12:54 crc kubenswrapper[4921]: I1210 13:12:54.505101 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b72dcc76-4198-4feb-a942-6a9073765a8e-kube-api-access-wxcxh" (OuterVolumeSpecName: "kube-api-access-wxcxh") pod "b72dcc76-4198-4feb-a942-6a9073765a8e" (UID: "b72dcc76-4198-4feb-a942-6a9073765a8e"). InnerVolumeSpecName "kube-api-access-wxcxh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 13:12:54 crc kubenswrapper[4921]: I1210 13:12:54.505534 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret" Dec 10 13:12:54 crc kubenswrapper[4921]: I1210 13:12:54.505651 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b0a10f7e-8028-455f-8db2-8c6e0b6e9d93-operator-scripts\") pod \"glance-db-create-k8zlg\" (UID: \"b0a10f7e-8028-455f-8db2-8c6e0b6e9d93\") " pod="openstack/glance-db-create-k8zlg" Dec 10 13:12:54 crc kubenswrapper[4921]: I1210 13:12:54.505780 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pz29s\" (UniqueName: \"kubernetes.io/projected/b0a10f7e-8028-455f-8db2-8c6e0b6e9d93-kube-api-access-pz29s\") pod \"glance-db-create-k8zlg\" (UID: \"b0a10f7e-8028-455f-8db2-8c6e0b6e9d93\") " pod="openstack/glance-db-create-k8zlg" Dec 10 13:12:54 crc kubenswrapper[4921]: I1210 13:12:54.505948 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxcxh\" (UniqueName: \"kubernetes.io/projected/b72dcc76-4198-4feb-a942-6a9073765a8e-kube-api-access-wxcxh\") on node \"crc\" DevicePath \"\"" Dec 10 13:12:54 crc kubenswrapper[4921]: I1210 13:12:54.508159 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-create-lgf2r" Dec 10 13:12:54 crc kubenswrapper[4921]: I1210 13:12:54.509090 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-6cc7-account-create-update-96qr2"] Dec 10 13:12:54 crc kubenswrapper[4921]: I1210 13:12:54.571715 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b72dcc76-4198-4feb-a942-6a9073765a8e-config" (OuterVolumeSpecName: "config") pod "b72dcc76-4198-4feb-a942-6a9073765a8e" (UID: "b72dcc76-4198-4feb-a942-6a9073765a8e"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 13:12:54 crc kubenswrapper[4921]: I1210 13:12:54.576867 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b72dcc76-4198-4feb-a942-6a9073765a8e-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "b72dcc76-4198-4feb-a942-6a9073765a8e" (UID: "b72dcc76-4198-4feb-a942-6a9073765a8e"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 13:12:54 crc kubenswrapper[4921]: I1210 13:12:54.581242 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b72dcc76-4198-4feb-a942-6a9073765a8e-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "b72dcc76-4198-4feb-a942-6a9073765a8e" (UID: "b72dcc76-4198-4feb-a942-6a9073765a8e"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 13:12:54 crc kubenswrapper[4921]: I1210 13:12:54.607892 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pz29s\" (UniqueName: \"kubernetes.io/projected/b0a10f7e-8028-455f-8db2-8c6e0b6e9d93-kube-api-access-pz29s\") pod \"glance-db-create-k8zlg\" (UID: \"b0a10f7e-8028-455f-8db2-8c6e0b6e9d93\") " pod="openstack/glance-db-create-k8zlg" Dec 10 13:12:54 crc kubenswrapper[4921]: I1210 13:12:54.607950 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/67e2c579-4d8a-4ee6-bc94-45188bce58d4-operator-scripts\") pod \"glance-6cc7-account-create-update-96qr2\" (UID: \"67e2c579-4d8a-4ee6-bc94-45188bce58d4\") " pod="openstack/glance-6cc7-account-create-update-96qr2" Dec 10 13:12:54 crc kubenswrapper[4921]: I1210 13:12:54.608049 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b0a10f7e-8028-455f-8db2-8c6e0b6e9d93-operator-scripts\") pod \"glance-db-create-k8zlg\" (UID: \"b0a10f7e-8028-455f-8db2-8c6e0b6e9d93\") " pod="openstack/glance-db-create-k8zlg" Dec 10 13:12:54 crc kubenswrapper[4921]: I1210 13:12:54.608075 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xn5g6\" (UniqueName: \"kubernetes.io/projected/67e2c579-4d8a-4ee6-bc94-45188bce58d4-kube-api-access-xn5g6\") pod \"glance-6cc7-account-create-update-96qr2\" (UID: \"67e2c579-4d8a-4ee6-bc94-45188bce58d4\") " pod="openstack/glance-6cc7-account-create-update-96qr2" Dec 10 13:12:54 crc kubenswrapper[4921]: I1210 13:12:54.608119 4921 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b72dcc76-4198-4feb-a942-6a9073765a8e-config\") on node \"crc\" DevicePath \"\"" Dec 10 13:12:54 crc kubenswrapper[4921]: I1210 13:12:54.608129 4921 reconciler_common.go:293] "Volume detached for volume 
\"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b72dcc76-4198-4feb-a942-6a9073765a8e-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 10 13:12:54 crc kubenswrapper[4921]: I1210 13:12:54.608140 4921 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b72dcc76-4198-4feb-a942-6a9073765a8e-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 10 13:12:54 crc kubenswrapper[4921]: I1210 13:12:54.609277 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b0a10f7e-8028-455f-8db2-8c6e0b6e9d93-operator-scripts\") pod \"glance-db-create-k8zlg\" (UID: \"b0a10f7e-8028-455f-8db2-8c6e0b6e9d93\") " pod="openstack/glance-db-create-k8zlg" Dec 10 13:12:54 crc kubenswrapper[4921]: I1210 13:12:54.631162 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-f6ee-account-create-update-w9mnd" Dec 10 13:12:54 crc kubenswrapper[4921]: I1210 13:12:54.638228 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pz29s\" (UniqueName: \"kubernetes.io/projected/b0a10f7e-8028-455f-8db2-8c6e0b6e9d93-kube-api-access-pz29s\") pod \"glance-db-create-k8zlg\" (UID: \"b0a10f7e-8028-455f-8db2-8c6e0b6e9d93\") " pod="openstack/glance-db-create-k8zlg" Dec 10 13:12:54 crc kubenswrapper[4921]: I1210 13:12:54.693204 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-k8zlg" Dec 10 13:12:54 crc kubenswrapper[4921]: I1210 13:12:54.719268 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xn5g6\" (UniqueName: \"kubernetes.io/projected/67e2c579-4d8a-4ee6-bc94-45188bce58d4-kube-api-access-xn5g6\") pod \"glance-6cc7-account-create-update-96qr2\" (UID: \"67e2c579-4d8a-4ee6-bc94-45188bce58d4\") " pod="openstack/glance-6cc7-account-create-update-96qr2" Dec 10 13:12:54 crc kubenswrapper[4921]: I1210 13:12:54.719333 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/67e2c579-4d8a-4ee6-bc94-45188bce58d4-operator-scripts\") pod \"glance-6cc7-account-create-update-96qr2\" (UID: \"67e2c579-4d8a-4ee6-bc94-45188bce58d4\") " pod="openstack/glance-6cc7-account-create-update-96qr2" Dec 10 13:12:54 crc kubenswrapper[4921]: I1210 13:12:54.720052 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/67e2c579-4d8a-4ee6-bc94-45188bce58d4-operator-scripts\") pod \"glance-6cc7-account-create-update-96qr2\" (UID: \"67e2c579-4d8a-4ee6-bc94-45188bce58d4\") " pod="openstack/glance-6cc7-account-create-update-96qr2" Dec 10 13:12:54 crc kubenswrapper[4921]: I1210 13:12:54.742798 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xn5g6\" (UniqueName: \"kubernetes.io/projected/67e2c579-4d8a-4ee6-bc94-45188bce58d4-kube-api-access-xn5g6\") pod \"glance-6cc7-account-create-update-96qr2\" (UID: \"67e2c579-4d8a-4ee6-bc94-45188bce58d4\") " pod="openstack/glance-6cc7-account-create-update-96qr2" Dec 10 13:12:54 crc kubenswrapper[4921]: I1210 13:12:54.761769 4921 generic.go:334] "Generic (PLEG): container finished" podID="b72dcc76-4198-4feb-a942-6a9073765a8e" containerID="c8fddc61acafcb73d26a1c868452b67c0d32820ebf766e3b9afec7c5719f6eb4" exitCode=0 Dec 10 13:12:54 crc kubenswrapper[4921]: I1210 13:12:54.761810 4921 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openstack/dnsmasq-dns-5bf47b49b7-b8bwc" event={"ID":"b72dcc76-4198-4feb-a942-6a9073765a8e","Type":"ContainerDied","Data":"c8fddc61acafcb73d26a1c868452b67c0d32820ebf766e3b9afec7c5719f6eb4"} Dec 10 13:12:54 crc kubenswrapper[4921]: I1210 13:12:54.761837 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bf47b49b7-b8bwc" event={"ID":"b72dcc76-4198-4feb-a942-6a9073765a8e","Type":"ContainerDied","Data":"87faa32da607eb32b450603777153278f4a3622c24687a898bc14f4b23727915"} Dec 10 13:12:54 crc kubenswrapper[4921]: I1210 13:12:54.761857 4921 scope.go:117] "RemoveContainer" containerID="c8fddc61acafcb73d26a1c868452b67c0d32820ebf766e3b9afec7c5719f6eb4" Dec 10 13:12:54 crc kubenswrapper[4921]: I1210 13:12:54.761978 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5bf47b49b7-b8bwc" Dec 10 13:12:54 crc kubenswrapper[4921]: I1210 13:12:54.804202 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5bf47b49b7-b8bwc"] Dec 10 13:12:54 crc kubenswrapper[4921]: I1210 13:12:54.810362 4921 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5bf47b49b7-b8bwc"] Dec 10 13:12:54 crc kubenswrapper[4921]: I1210 13:12:54.824226 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-6cc7-account-create-update-96qr2" Dec 10 13:12:54 crc kubenswrapper[4921]: I1210 13:12:54.832502 4921 scope.go:117] "RemoveContainer" containerID="e305687028a7bd8c7d2e48e7146f9aa3f2c8da14a02515fccca76c75cc6b6b82" Dec 10 13:12:54 crc kubenswrapper[4921]: I1210 13:12:54.895803 4921 scope.go:117] "RemoveContainer" containerID="c8fddc61acafcb73d26a1c868452b67c0d32820ebf766e3b9afec7c5719f6eb4" Dec 10 13:12:54 crc kubenswrapper[4921]: E1210 13:12:54.903274 4921 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c8fddc61acafcb73d26a1c868452b67c0d32820ebf766e3b9afec7c5719f6eb4\": container with ID starting with c8fddc61acafcb73d26a1c868452b67c0d32820ebf766e3b9afec7c5719f6eb4 not found: ID does not exist" containerID="c8fddc61acafcb73d26a1c868452b67c0d32820ebf766e3b9afec7c5719f6eb4" Dec 10 13:12:54 crc kubenswrapper[4921]: I1210 13:12:54.903318 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c8fddc61acafcb73d26a1c868452b67c0d32820ebf766e3b9afec7c5719f6eb4"} err="failed to get container status \"c8fddc61acafcb73d26a1c868452b67c0d32820ebf766e3b9afec7c5719f6eb4\": rpc error: code = NotFound desc = could not find container \"c8fddc61acafcb73d26a1c868452b67c0d32820ebf766e3b9afec7c5719f6eb4\": container with ID starting with c8fddc61acafcb73d26a1c868452b67c0d32820ebf766e3b9afec7c5719f6eb4 not found: ID does not exist" Dec 10 13:12:54 crc kubenswrapper[4921]: I1210 13:12:54.903344 4921 scope.go:117] "RemoveContainer" containerID="e305687028a7bd8c7d2e48e7146f9aa3f2c8da14a02515fccca76c75cc6b6b82" Dec 10 13:12:54 crc kubenswrapper[4921]: E1210 13:12:54.909658 4921 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e305687028a7bd8c7d2e48e7146f9aa3f2c8da14a02515fccca76c75cc6b6b82\": container with ID starting with e305687028a7bd8c7d2e48e7146f9aa3f2c8da14a02515fccca76c75cc6b6b82 not found: ID does not exist" containerID="e305687028a7bd8c7d2e48e7146f9aa3f2c8da14a02515fccca76c75cc6b6b82" Dec 10 13:12:54 crc kubenswrapper[4921]: I1210 13:12:54.909687 4921 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e305687028a7bd8c7d2e48e7146f9aa3f2c8da14a02515fccca76c75cc6b6b82"} err="failed to get container status \"e305687028a7bd8c7d2e48e7146f9aa3f2c8da14a02515fccca76c75cc6b6b82\": rpc error: code = NotFound desc = could not find container \"e305687028a7bd8c7d2e48e7146f9aa3f2c8da14a02515fccca76c75cc6b6b82\": container with ID starting with e305687028a7bd8c7d2e48e7146f9aa3f2c8da14a02515fccca76c75cc6b6b82 not found: ID does not exist" Dec 10 13:12:54 crc kubenswrapper[4921]: I1210 13:12:54.912375 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-f7n4f"] Dec 10 13:12:54 crc kubenswrapper[4921]: I1210 13:12:54.921316 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-9f3a-account-create-update-fl26p"] Dec 10 13:12:55 crc kubenswrapper[4921]: I1210 13:12:55.018063 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-f6ee-account-create-update-w9mnd"] Dec 10 13:12:55 crc kubenswrapper[4921]: I1210 13:12:55.084772 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-lgf2r"] Dec 10 13:12:55 crc kubenswrapper[4921]: W1210 13:12:55.125769 4921 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod880b1430_d97c_499e_983f_6ec48060e96b.slice/crio-819e67577a0aaa4d7607947de0243470c1c99362d4edef398eac937a06f948b5 WatchSource:0}: Error finding container 819e67577a0aaa4d7607947de0243470c1c99362d4edef398eac937a06f948b5: Status 404 returned error can't find the container with id 819e67577a0aaa4d7607947de0243470c1c99362d4edef398eac937a06f948b5 Dec 10 13:12:55 crc kubenswrapper[4921]: I1210 13:12:55.217452 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b72dcc76-4198-4feb-a942-6a9073765a8e" path="/var/lib/kubelet/pods/b72dcc76-4198-4feb-a942-6a9073765a8e/volumes" Dec 10 13:12:55 crc kubenswrapper[4921]: I1210 13:12:55.326345 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-k8zlg"] Dec 10 13:12:55 crc kubenswrapper[4921]: I1210 13:12:55.386638 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-6cc7-account-create-update-96qr2"] Dec 10 13:12:55 crc kubenswrapper[4921]: I1210 13:12:55.770774 4921 generic.go:334] "Generic (PLEG): container finished" podID="f6179226-7f67-4fad-95fd-7542f4abdcff" containerID="f3ae60355260c0e593ce3bd90588adcbd2a4a48b5cbc752b5896e20af1e67300" exitCode=0 Dec 10 13:12:55 crc kubenswrapper[4921]: I1210 13:12:55.770957 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-9f3a-account-create-update-fl26p" event={"ID":"f6179226-7f67-4fad-95fd-7542f4abdcff","Type":"ContainerDied","Data":"f3ae60355260c0e593ce3bd90588adcbd2a4a48b5cbc752b5896e20af1e67300"} Dec 10 13:12:55 crc kubenswrapper[4921]: I1210 13:12:55.771663 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-9f3a-account-create-update-fl26p" event={"ID":"f6179226-7f67-4fad-95fd-7542f4abdcff","Type":"ContainerStarted","Data":"c1a29e9bd21b3035a6ec9833356de467a58add8db55f1ccc14a557e5d6245470"} Dec 10 13:12:55 crc kubenswrapper[4921]: I1210 13:12:55.772888 4921 generic.go:334] "Generic (PLEG): container finished" podID="2082e853-50b5-45c4-ba0a-d27cdd8e702a" containerID="72f68979cfe62c4ddd72ec2b3bf1bca68c8f49fa84227930a085ec178eb09e29" exitCode=0 Dec 10 13:12:55 crc kubenswrapper[4921]: I1210 13:12:55.773025 
4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-f7n4f" event={"ID":"2082e853-50b5-45c4-ba0a-d27cdd8e702a","Type":"ContainerDied","Data":"72f68979cfe62c4ddd72ec2b3bf1bca68c8f49fa84227930a085ec178eb09e29"} Dec 10 13:12:55 crc kubenswrapper[4921]: I1210 13:12:55.773193 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-f7n4f" event={"ID":"2082e853-50b5-45c4-ba0a-d27cdd8e702a","Type":"ContainerStarted","Data":"28010e8782a11dc344a631412ffd382669a7538d4c761f4e7b346695e0ce49a1"} Dec 10 13:12:55 crc kubenswrapper[4921]: I1210 13:12:55.774553 4921 generic.go:334] "Generic (PLEG): container finished" podID="39c24196-2136-47b7-9aa5-372cf1de38db" containerID="8b4c6c48222767cc4b0baf809878d54b028d221e4544a96cf1e56740fc07cfd5" exitCode=0 Dec 10 13:12:55 crc kubenswrapper[4921]: I1210 13:12:55.774665 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-f6ee-account-create-update-w9mnd" event={"ID":"39c24196-2136-47b7-9aa5-372cf1de38db","Type":"ContainerDied","Data":"8b4c6c48222767cc4b0baf809878d54b028d221e4544a96cf1e56740fc07cfd5"} Dec 10 13:12:55 crc kubenswrapper[4921]: I1210 13:12:55.774757 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-f6ee-account-create-update-w9mnd" event={"ID":"39c24196-2136-47b7-9aa5-372cf1de38db","Type":"ContainerStarted","Data":"a6e17aeb460fb72f512e682b7deb34b02986ef610b3a23e40a08d3e43f20843f"} Dec 10 13:12:55 crc kubenswrapper[4921]: I1210 13:12:55.776114 4921 generic.go:334] "Generic (PLEG): container finished" podID="b0a10f7e-8028-455f-8db2-8c6e0b6e9d93" containerID="cf0b5f250cabecaae51f6158f1a69de27f852a281c6cb357f5fe6b988258181e" exitCode=0 Dec 10 13:12:55 crc kubenswrapper[4921]: I1210 13:12:55.776231 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-k8zlg" event={"ID":"b0a10f7e-8028-455f-8db2-8c6e0b6e9d93","Type":"ContainerDied","Data":"cf0b5f250cabecaae51f6158f1a69de27f852a281c6cb357f5fe6b988258181e"} Dec 10 13:12:55 crc kubenswrapper[4921]: I1210 13:12:55.776305 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-k8zlg" event={"ID":"b0a10f7e-8028-455f-8db2-8c6e0b6e9d93","Type":"ContainerStarted","Data":"fbda4e0f3b87966d67e7577a67ca13e0d9a440f4f79ad7ad41abd2574f2f5557"} Dec 10 13:12:55 crc kubenswrapper[4921]: I1210 13:12:55.778093 4921 generic.go:334] "Generic (PLEG): container finished" podID="880b1430-d97c-499e-983f-6ec48060e96b" containerID="bf80757d248f1b911eea2282899dc08284b186a27dc3cbf291d7f89a7958e70e" exitCode=0 Dec 10 13:12:55 crc kubenswrapper[4921]: I1210 13:12:55.778228 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-lgf2r" event={"ID":"880b1430-d97c-499e-983f-6ec48060e96b","Type":"ContainerDied","Data":"bf80757d248f1b911eea2282899dc08284b186a27dc3cbf291d7f89a7958e70e"} Dec 10 13:12:55 crc kubenswrapper[4921]: I1210 13:12:55.778271 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-lgf2r" event={"ID":"880b1430-d97c-499e-983f-6ec48060e96b","Type":"ContainerStarted","Data":"819e67577a0aaa4d7607947de0243470c1c99362d4edef398eac937a06f948b5"} Dec 10 13:12:55 crc kubenswrapper[4921]: I1210 13:12:55.779505 4921 generic.go:334] "Generic (PLEG): container finished" podID="67e2c579-4d8a-4ee6-bc94-45188bce58d4" containerID="9f99ecbc9cfbe13cb75b13ca6e15739cd31ea355853b4669d90754b200dd451a" exitCode=0 Dec 10 13:12:55 crc kubenswrapper[4921]: I1210 13:12:55.779628 4921 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-6cc7-account-create-update-96qr2" event={"ID":"67e2c579-4d8a-4ee6-bc94-45188bce58d4","Type":"ContainerDied","Data":"9f99ecbc9cfbe13cb75b13ca6e15739cd31ea355853b4669d90754b200dd451a"} Dec 10 13:12:55 crc kubenswrapper[4921]: I1210 13:12:55.779771 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-6cc7-account-create-update-96qr2" event={"ID":"67e2c579-4d8a-4ee6-bc94-45188bce58d4","Type":"ContainerStarted","Data":"8b4a7bcbab9c994ab41e6c6947b85ee6350ddd6170c7a2844ed9a875ea80d42c"} Dec 10 13:12:56 crc kubenswrapper[4921]: I1210 13:12:56.108614 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Dec 10 13:12:56 crc kubenswrapper[4921]: I1210 13:12:56.116338 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Dec 10 13:12:57 crc kubenswrapper[4921]: I1210 13:12:57.169973 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-f6ee-account-create-update-w9mnd" Dec 10 13:12:57 crc kubenswrapper[4921]: I1210 13:12:57.262475 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p27t2\" (UniqueName: \"kubernetes.io/projected/39c24196-2136-47b7-9aa5-372cf1de38db-kube-api-access-p27t2\") pod \"39c24196-2136-47b7-9aa5-372cf1de38db\" (UID: \"39c24196-2136-47b7-9aa5-372cf1de38db\") " Dec 10 13:12:57 crc kubenswrapper[4921]: I1210 13:12:57.262746 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/39c24196-2136-47b7-9aa5-372cf1de38db-operator-scripts\") pod \"39c24196-2136-47b7-9aa5-372cf1de38db\" (UID: \"39c24196-2136-47b7-9aa5-372cf1de38db\") " Dec 10 13:12:57 crc kubenswrapper[4921]: I1210 13:12:57.263849 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/39c24196-2136-47b7-9aa5-372cf1de38db-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "39c24196-2136-47b7-9aa5-372cf1de38db" (UID: "39c24196-2136-47b7-9aa5-372cf1de38db"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 13:12:57 crc kubenswrapper[4921]: I1210 13:12:57.264520 4921 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/39c24196-2136-47b7-9aa5-372cf1de38db-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 13:12:57 crc kubenswrapper[4921]: I1210 13:12:57.268837 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/39c24196-2136-47b7-9aa5-372cf1de38db-kube-api-access-p27t2" (OuterVolumeSpecName: "kube-api-access-p27t2") pod "39c24196-2136-47b7-9aa5-372cf1de38db" (UID: "39c24196-2136-47b7-9aa5-372cf1de38db"). InnerVolumeSpecName "kube-api-access-p27t2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 13:12:57 crc kubenswrapper[4921]: I1210 13:12:57.375109 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p27t2\" (UniqueName: \"kubernetes.io/projected/39c24196-2136-47b7-9aa5-372cf1de38db-kube-api-access-p27t2\") on node \"crc\" DevicePath \"\"" Dec 10 13:12:57 crc kubenswrapper[4921]: I1210 13:12:57.401687 4921 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-k8zlg" Dec 10 13:12:57 crc kubenswrapper[4921]: I1210 13:12:57.424603 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-lgf2r" Dec 10 13:12:57 crc kubenswrapper[4921]: I1210 13:12:57.432836 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-f7n4f" Dec 10 13:12:57 crc kubenswrapper[4921]: I1210 13:12:57.446881 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-6cc7-account-create-update-96qr2" Dec 10 13:12:57 crc kubenswrapper[4921]: I1210 13:12:57.448171 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-9f3a-account-create-update-fl26p" Dec 10 13:12:57 crc kubenswrapper[4921]: I1210 13:12:57.577953 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d5b7\" (UniqueName: \"kubernetes.io/projected/f6179226-7f67-4fad-95fd-7542f4abdcff-kube-api-access-4d5b7\") pod \"f6179226-7f67-4fad-95fd-7542f4abdcff\" (UID: \"f6179226-7f67-4fad-95fd-7542f4abdcff\") " Dec 10 13:12:57 crc kubenswrapper[4921]: I1210 13:12:57.578151 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f6179226-7f67-4fad-95fd-7542f4abdcff-operator-scripts\") pod \"f6179226-7f67-4fad-95fd-7542f4abdcff\" (UID: \"f6179226-7f67-4fad-95fd-7542f4abdcff\") " Dec 10 13:12:57 crc kubenswrapper[4921]: I1210 13:12:57.578225 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pz29s\" (UniqueName: \"kubernetes.io/projected/b0a10f7e-8028-455f-8db2-8c6e0b6e9d93-kube-api-access-pz29s\") pod \"b0a10f7e-8028-455f-8db2-8c6e0b6e9d93\" (UID: \"b0a10f7e-8028-455f-8db2-8c6e0b6e9d93\") " Dec 10 13:12:57 crc kubenswrapper[4921]: I1210 13:12:57.578259 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2082e853-50b5-45c4-ba0a-d27cdd8e702a-operator-scripts\") pod \"2082e853-50b5-45c4-ba0a-d27cdd8e702a\" (UID: \"2082e853-50b5-45c4-ba0a-d27cdd8e702a\") " Dec 10 13:12:57 crc kubenswrapper[4921]: I1210 13:12:57.578306 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/67e2c579-4d8a-4ee6-bc94-45188bce58d4-operator-scripts\") pod \"67e2c579-4d8a-4ee6-bc94-45188bce58d4\" (UID: \"67e2c579-4d8a-4ee6-bc94-45188bce58d4\") " Dec 10 13:12:57 crc kubenswrapper[4921]: I1210 13:12:57.578354 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/880b1430-d97c-499e-983f-6ec48060e96b-operator-scripts\") pod \"880b1430-d97c-499e-983f-6ec48060e96b\" (UID: \"880b1430-d97c-499e-983f-6ec48060e96b\") " Dec 10 13:12:57 crc kubenswrapper[4921]: I1210 13:12:57.578913 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/67e2c579-4d8a-4ee6-bc94-45188bce58d4-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "67e2c579-4d8a-4ee6-bc94-45188bce58d4" (UID: "67e2c579-4d8a-4ee6-bc94-45188bce58d4"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 13:12:57 crc kubenswrapper[4921]: I1210 13:12:57.578926 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f6179226-7f67-4fad-95fd-7542f4abdcff-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "f6179226-7f67-4fad-95fd-7542f4abdcff" (UID: "f6179226-7f67-4fad-95fd-7542f4abdcff"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 13:12:57 crc kubenswrapper[4921]: I1210 13:12:57.578956 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2082e853-50b5-45c4-ba0a-d27cdd8e702a-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "2082e853-50b5-45c4-ba0a-d27cdd8e702a" (UID: "2082e853-50b5-45c4-ba0a-d27cdd8e702a"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 13:12:57 crc kubenswrapper[4921]: I1210 13:12:57.578957 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/880b1430-d97c-499e-983f-6ec48060e96b-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "880b1430-d97c-499e-983f-6ec48060e96b" (UID: "880b1430-d97c-499e-983f-6ec48060e96b"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 13:12:57 crc kubenswrapper[4921]: I1210 13:12:57.578970 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xn5g6\" (UniqueName: \"kubernetes.io/projected/67e2c579-4d8a-4ee6-bc94-45188bce58d4-kube-api-access-xn5g6\") pod \"67e2c579-4d8a-4ee6-bc94-45188bce58d4\" (UID: \"67e2c579-4d8a-4ee6-bc94-45188bce58d4\") " Dec 10 13:12:57 crc kubenswrapper[4921]: I1210 13:12:57.579029 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b0a10f7e-8028-455f-8db2-8c6e0b6e9d93-operator-scripts\") pod \"b0a10f7e-8028-455f-8db2-8c6e0b6e9d93\" (UID: \"b0a10f7e-8028-455f-8db2-8c6e0b6e9d93\") " Dec 10 13:12:57 crc kubenswrapper[4921]: I1210 13:12:57.579077 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4b89r\" (UniqueName: \"kubernetes.io/projected/880b1430-d97c-499e-983f-6ec48060e96b-kube-api-access-4b89r\") pod \"880b1430-d97c-499e-983f-6ec48060e96b\" (UID: \"880b1430-d97c-499e-983f-6ec48060e96b\") " Dec 10 13:12:57 crc kubenswrapper[4921]: I1210 13:12:57.579109 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h6n88\" (UniqueName: \"kubernetes.io/projected/2082e853-50b5-45c4-ba0a-d27cdd8e702a-kube-api-access-h6n88\") pod \"2082e853-50b5-45c4-ba0a-d27cdd8e702a\" (UID: \"2082e853-50b5-45c4-ba0a-d27cdd8e702a\") " Dec 10 13:12:57 crc kubenswrapper[4921]: I1210 13:12:57.579652 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b0a10f7e-8028-455f-8db2-8c6e0b6e9d93-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "b0a10f7e-8028-455f-8db2-8c6e0b6e9d93" (UID: "b0a10f7e-8028-455f-8db2-8c6e0b6e9d93"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 13:12:57 crc kubenswrapper[4921]: I1210 13:12:57.580350 4921 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/67e2c579-4d8a-4ee6-bc94-45188bce58d4-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 13:12:57 crc kubenswrapper[4921]: I1210 13:12:57.580371 4921 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/880b1430-d97c-499e-983f-6ec48060e96b-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 13:12:57 crc kubenswrapper[4921]: I1210 13:12:57.580380 4921 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b0a10f7e-8028-455f-8db2-8c6e0b6e9d93-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 13:12:57 crc kubenswrapper[4921]: I1210 13:12:57.580457 4921 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f6179226-7f67-4fad-95fd-7542f4abdcff-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 13:12:57 crc kubenswrapper[4921]: I1210 13:12:57.580467 4921 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2082e853-50b5-45c4-ba0a-d27cdd8e702a-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 13:12:57 crc kubenswrapper[4921]: I1210 13:12:57.582200 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/67e2c579-4d8a-4ee6-bc94-45188bce58d4-kube-api-access-xn5g6" (OuterVolumeSpecName: "kube-api-access-xn5g6") pod "67e2c579-4d8a-4ee6-bc94-45188bce58d4" (UID: "67e2c579-4d8a-4ee6-bc94-45188bce58d4"). InnerVolumeSpecName "kube-api-access-xn5g6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 13:12:57 crc kubenswrapper[4921]: I1210 13:12:57.583369 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2082e853-50b5-45c4-ba0a-d27cdd8e702a-kube-api-access-h6n88" (OuterVolumeSpecName: "kube-api-access-h6n88") pod "2082e853-50b5-45c4-ba0a-d27cdd8e702a" (UID: "2082e853-50b5-45c4-ba0a-d27cdd8e702a"). InnerVolumeSpecName "kube-api-access-h6n88". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 13:12:57 crc kubenswrapper[4921]: I1210 13:12:57.583431 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b0a10f7e-8028-455f-8db2-8c6e0b6e9d93-kube-api-access-pz29s" (OuterVolumeSpecName: "kube-api-access-pz29s") pod "b0a10f7e-8028-455f-8db2-8c6e0b6e9d93" (UID: "b0a10f7e-8028-455f-8db2-8c6e0b6e9d93"). InnerVolumeSpecName "kube-api-access-pz29s". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 13:12:57 crc kubenswrapper[4921]: I1210 13:12:57.583897 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f6179226-7f67-4fad-95fd-7542f4abdcff-kube-api-access-4d5b7" (OuterVolumeSpecName: "kube-api-access-4d5b7") pod "f6179226-7f67-4fad-95fd-7542f4abdcff" (UID: "f6179226-7f67-4fad-95fd-7542f4abdcff"). InnerVolumeSpecName "kube-api-access-4d5b7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 13:12:57 crc kubenswrapper[4921]: I1210 13:12:57.583967 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/880b1430-d97c-499e-983f-6ec48060e96b-kube-api-access-4b89r" (OuterVolumeSpecName: "kube-api-access-4b89r") pod "880b1430-d97c-499e-983f-6ec48060e96b" (UID: "880b1430-d97c-499e-983f-6ec48060e96b"). InnerVolumeSpecName "kube-api-access-4b89r". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 13:12:57 crc kubenswrapper[4921]: I1210 13:12:57.681663 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d5b7\" (UniqueName: \"kubernetes.io/projected/f6179226-7f67-4fad-95fd-7542f4abdcff-kube-api-access-4d5b7\") on node \"crc\" DevicePath \"\"" Dec 10 13:12:57 crc kubenswrapper[4921]: I1210 13:12:57.681695 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pz29s\" (UniqueName: \"kubernetes.io/projected/b0a10f7e-8028-455f-8db2-8c6e0b6e9d93-kube-api-access-pz29s\") on node \"crc\" DevicePath \"\"" Dec 10 13:12:57 crc kubenswrapper[4921]: I1210 13:12:57.681704 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xn5g6\" (UniqueName: \"kubernetes.io/projected/67e2c579-4d8a-4ee6-bc94-45188bce58d4-kube-api-access-xn5g6\") on node \"crc\" DevicePath \"\"" Dec 10 13:12:57 crc kubenswrapper[4921]: I1210 13:12:57.681712 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4b89r\" (UniqueName: \"kubernetes.io/projected/880b1430-d97c-499e-983f-6ec48060e96b-kube-api-access-4b89r\") on node \"crc\" DevicePath \"\"" Dec 10 13:12:57 crc kubenswrapper[4921]: I1210 13:12:57.681721 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h6n88\" (UniqueName: \"kubernetes.io/projected/2082e853-50b5-45c4-ba0a-d27cdd8e702a-kube-api-access-h6n88\") on node \"crc\" DevicePath \"\"" Dec 10 13:12:57 crc kubenswrapper[4921]: I1210 13:12:57.816024 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-lgf2r" event={"ID":"880b1430-d97c-499e-983f-6ec48060e96b","Type":"ContainerDied","Data":"819e67577a0aaa4d7607947de0243470c1c99362d4edef398eac937a06f948b5"} Dec 10 13:12:57 crc kubenswrapper[4921]: I1210 13:12:57.816072 4921 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="819e67577a0aaa4d7607947de0243470c1c99362d4edef398eac937a06f948b5" Dec 10 13:12:57 crc kubenswrapper[4921]: I1210 13:12:57.816113 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-lgf2r" Dec 10 13:12:57 crc kubenswrapper[4921]: I1210 13:12:57.817349 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-6cc7-account-create-update-96qr2" event={"ID":"67e2c579-4d8a-4ee6-bc94-45188bce58d4","Type":"ContainerDied","Data":"8b4a7bcbab9c994ab41e6c6947b85ee6350ddd6170c7a2844ed9a875ea80d42c"} Dec 10 13:12:57 crc kubenswrapper[4921]: I1210 13:12:57.817379 4921 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8b4a7bcbab9c994ab41e6c6947b85ee6350ddd6170c7a2844ed9a875ea80d42c" Dec 10 13:12:57 crc kubenswrapper[4921]: I1210 13:12:57.817407 4921 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-6cc7-account-create-update-96qr2" Dec 10 13:12:57 crc kubenswrapper[4921]: I1210 13:12:57.821105 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-9f3a-account-create-update-fl26p" event={"ID":"f6179226-7f67-4fad-95fd-7542f4abdcff","Type":"ContainerDied","Data":"c1a29e9bd21b3035a6ec9833356de467a58add8db55f1ccc14a557e5d6245470"} Dec 10 13:12:57 crc kubenswrapper[4921]: I1210 13:12:57.821151 4921 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c1a29e9bd21b3035a6ec9833356de467a58add8db55f1ccc14a557e5d6245470" Dec 10 13:12:57 crc kubenswrapper[4921]: I1210 13:12:57.821123 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-9f3a-account-create-update-fl26p" Dec 10 13:12:57 crc kubenswrapper[4921]: I1210 13:12:57.822759 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-f7n4f" event={"ID":"2082e853-50b5-45c4-ba0a-d27cdd8e702a","Type":"ContainerDied","Data":"28010e8782a11dc344a631412ffd382669a7538d4c761f4e7b346695e0ce49a1"} Dec 10 13:12:57 crc kubenswrapper[4921]: I1210 13:12:57.822888 4921 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="28010e8782a11dc344a631412ffd382669a7538d4c761f4e7b346695e0ce49a1" Dec 10 13:12:57 crc kubenswrapper[4921]: I1210 13:12:57.822818 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-f7n4f" Dec 10 13:12:57 crc kubenswrapper[4921]: I1210 13:12:57.823857 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-f6ee-account-create-update-w9mnd" event={"ID":"39c24196-2136-47b7-9aa5-372cf1de38db","Type":"ContainerDied","Data":"a6e17aeb460fb72f512e682b7deb34b02986ef610b3a23e40a08d3e43f20843f"} Dec 10 13:12:57 crc kubenswrapper[4921]: I1210 13:12:57.823899 4921 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a6e17aeb460fb72f512e682b7deb34b02986ef610b3a23e40a08d3e43f20843f" Dec 10 13:12:57 crc kubenswrapper[4921]: I1210 13:12:57.823952 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-f6ee-account-create-update-w9mnd" Dec 10 13:12:57 crc kubenswrapper[4921]: I1210 13:12:57.826328 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-k8zlg" event={"ID":"b0a10f7e-8028-455f-8db2-8c6e0b6e9d93","Type":"ContainerDied","Data":"fbda4e0f3b87966d67e7577a67ca13e0d9a440f4f79ad7ad41abd2574f2f5557"} Dec 10 13:12:57 crc kubenswrapper[4921]: I1210 13:12:57.826359 4921 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-k8zlg" Dec 10 13:12:57 crc kubenswrapper[4921]: I1210 13:12:57.826355 4921 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fbda4e0f3b87966d67e7577a67ca13e0d9a440f4f79ad7ad41abd2574f2f5557" Dec 10 13:12:59 crc kubenswrapper[4921]: I1210 13:12:59.097276 4921 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-75zw8" podUID="be165009-1ecf-4849-8cff-e83071094e81" containerName="ovn-controller" probeResult="failure" output=< Dec 10 13:12:59 crc kubenswrapper[4921]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Dec 10 13:12:59 crc kubenswrapper[4921]: > Dec 10 13:12:59 crc kubenswrapper[4921]: I1210 13:12:59.151095 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-zv79d" Dec 10 13:12:59 crc kubenswrapper[4921]: I1210 13:12:59.153311 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-zv79d" Dec 10 13:12:59 crc kubenswrapper[4921]: I1210 13:12:59.400164 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-75zw8-config-fqw79"] Dec 10 13:12:59 crc kubenswrapper[4921]: E1210 13:12:59.400784 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2082e853-50b5-45c4-ba0a-d27cdd8e702a" containerName="mariadb-database-create" Dec 10 13:12:59 crc kubenswrapper[4921]: I1210 13:12:59.400800 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="2082e853-50b5-45c4-ba0a-d27cdd8e702a" containerName="mariadb-database-create" Dec 10 13:12:59 crc kubenswrapper[4921]: E1210 13:12:59.400822 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b0a10f7e-8028-455f-8db2-8c6e0b6e9d93" containerName="mariadb-database-create" Dec 10 13:12:59 crc kubenswrapper[4921]: I1210 13:12:59.400829 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="b0a10f7e-8028-455f-8db2-8c6e0b6e9d93" containerName="mariadb-database-create" Dec 10 13:12:59 crc kubenswrapper[4921]: E1210 13:12:59.400855 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="880b1430-d97c-499e-983f-6ec48060e96b" containerName="mariadb-database-create" Dec 10 13:12:59 crc kubenswrapper[4921]: I1210 13:12:59.400861 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="880b1430-d97c-499e-983f-6ec48060e96b" containerName="mariadb-database-create" Dec 10 13:12:59 crc kubenswrapper[4921]: E1210 13:12:59.400868 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="67e2c579-4d8a-4ee6-bc94-45188bce58d4" containerName="mariadb-account-create-update" Dec 10 13:12:59 crc kubenswrapper[4921]: I1210 13:12:59.400874 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="67e2c579-4d8a-4ee6-bc94-45188bce58d4" containerName="mariadb-account-create-update" Dec 10 13:12:59 crc kubenswrapper[4921]: E1210 13:12:59.400898 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="39c24196-2136-47b7-9aa5-372cf1de38db" containerName="mariadb-account-create-update" Dec 10 13:12:59 crc kubenswrapper[4921]: I1210 13:12:59.400904 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="39c24196-2136-47b7-9aa5-372cf1de38db" containerName="mariadb-account-create-update" Dec 10 13:12:59 crc kubenswrapper[4921]: E1210 13:12:59.400916 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f6179226-7f67-4fad-95fd-7542f4abdcff" containerName="mariadb-account-create-update" Dec 
10 13:12:59 crc kubenswrapper[4921]: I1210 13:12:59.404535 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="f6179226-7f67-4fad-95fd-7542f4abdcff" containerName="mariadb-account-create-update" Dec 10 13:12:59 crc kubenswrapper[4921]: I1210 13:12:59.405089 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="39c24196-2136-47b7-9aa5-372cf1de38db" containerName="mariadb-account-create-update" Dec 10 13:12:59 crc kubenswrapper[4921]: I1210 13:12:59.405125 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="2082e853-50b5-45c4-ba0a-d27cdd8e702a" containerName="mariadb-database-create" Dec 10 13:12:59 crc kubenswrapper[4921]: I1210 13:12:59.405140 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="f6179226-7f67-4fad-95fd-7542f4abdcff" containerName="mariadb-account-create-update" Dec 10 13:12:59 crc kubenswrapper[4921]: I1210 13:12:59.405153 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="880b1430-d97c-499e-983f-6ec48060e96b" containerName="mariadb-database-create" Dec 10 13:12:59 crc kubenswrapper[4921]: I1210 13:12:59.405165 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="67e2c579-4d8a-4ee6-bc94-45188bce58d4" containerName="mariadb-account-create-update" Dec 10 13:12:59 crc kubenswrapper[4921]: I1210 13:12:59.405185 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="b0a10f7e-8028-455f-8db2-8c6e0b6e9d93" containerName="mariadb-database-create" Dec 10 13:12:59 crc kubenswrapper[4921]: I1210 13:12:59.411714 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-75zw8-config-fqw79" Dec 10 13:12:59 crc kubenswrapper[4921]: I1210 13:12:59.414263 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Dec 10 13:12:59 crc kubenswrapper[4921]: I1210 13:12:59.418359 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-75zw8-config-fqw79"] Dec 10 13:12:59 crc kubenswrapper[4921]: I1210 13:12:59.513660 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/c1d6fc5c-8d39-4caa-bf7b-f42f07edaebe-var-run-ovn\") pod \"ovn-controller-75zw8-config-fqw79\" (UID: \"c1d6fc5c-8d39-4caa-bf7b-f42f07edaebe\") " pod="openstack/ovn-controller-75zw8-config-fqw79" Dec 10 13:12:59 crc kubenswrapper[4921]: I1210 13:12:59.513711 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/c1d6fc5c-8d39-4caa-bf7b-f42f07edaebe-var-log-ovn\") pod \"ovn-controller-75zw8-config-fqw79\" (UID: \"c1d6fc5c-8d39-4caa-bf7b-f42f07edaebe\") " pod="openstack/ovn-controller-75zw8-config-fqw79" Dec 10 13:12:59 crc kubenswrapper[4921]: I1210 13:12:59.513740 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/c1d6fc5c-8d39-4caa-bf7b-f42f07edaebe-var-run\") pod \"ovn-controller-75zw8-config-fqw79\" (UID: \"c1d6fc5c-8d39-4caa-bf7b-f42f07edaebe\") " pod="openstack/ovn-controller-75zw8-config-fqw79" Dec 10 13:12:59 crc kubenswrapper[4921]: I1210 13:12:59.513774 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c1d6fc5c-8d39-4caa-bf7b-f42f07edaebe-scripts\") pod \"ovn-controller-75zw8-config-fqw79\" 
(UID: \"c1d6fc5c-8d39-4caa-bf7b-f42f07edaebe\") " pod="openstack/ovn-controller-75zw8-config-fqw79" Dec 10 13:12:59 crc kubenswrapper[4921]: I1210 13:12:59.513818 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5s5nl\" (UniqueName: \"kubernetes.io/projected/c1d6fc5c-8d39-4caa-bf7b-f42f07edaebe-kube-api-access-5s5nl\") pod \"ovn-controller-75zw8-config-fqw79\" (UID: \"c1d6fc5c-8d39-4caa-bf7b-f42f07edaebe\") " pod="openstack/ovn-controller-75zw8-config-fqw79" Dec 10 13:12:59 crc kubenswrapper[4921]: I1210 13:12:59.513834 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/c1d6fc5c-8d39-4caa-bf7b-f42f07edaebe-additional-scripts\") pod \"ovn-controller-75zw8-config-fqw79\" (UID: \"c1d6fc5c-8d39-4caa-bf7b-f42f07edaebe\") " pod="openstack/ovn-controller-75zw8-config-fqw79" Dec 10 13:12:59 crc kubenswrapper[4921]: I1210 13:12:59.614826 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/c1d6fc5c-8d39-4caa-bf7b-f42f07edaebe-var-run-ovn\") pod \"ovn-controller-75zw8-config-fqw79\" (UID: \"c1d6fc5c-8d39-4caa-bf7b-f42f07edaebe\") " pod="openstack/ovn-controller-75zw8-config-fqw79" Dec 10 13:12:59 crc kubenswrapper[4921]: I1210 13:12:59.615126 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/c1d6fc5c-8d39-4caa-bf7b-f42f07edaebe-var-log-ovn\") pod \"ovn-controller-75zw8-config-fqw79\" (UID: \"c1d6fc5c-8d39-4caa-bf7b-f42f07edaebe\") " pod="openstack/ovn-controller-75zw8-config-fqw79" Dec 10 13:12:59 crc kubenswrapper[4921]: I1210 13:12:59.615247 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/c1d6fc5c-8d39-4caa-bf7b-f42f07edaebe-var-log-ovn\") pod \"ovn-controller-75zw8-config-fqw79\" (UID: \"c1d6fc5c-8d39-4caa-bf7b-f42f07edaebe\") " pod="openstack/ovn-controller-75zw8-config-fqw79" Dec 10 13:12:59 crc kubenswrapper[4921]: I1210 13:12:59.615247 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/c1d6fc5c-8d39-4caa-bf7b-f42f07edaebe-var-run-ovn\") pod \"ovn-controller-75zw8-config-fqw79\" (UID: \"c1d6fc5c-8d39-4caa-bf7b-f42f07edaebe\") " pod="openstack/ovn-controller-75zw8-config-fqw79" Dec 10 13:12:59 crc kubenswrapper[4921]: I1210 13:12:59.615259 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/c1d6fc5c-8d39-4caa-bf7b-f42f07edaebe-var-run\") pod \"ovn-controller-75zw8-config-fqw79\" (UID: \"c1d6fc5c-8d39-4caa-bf7b-f42f07edaebe\") " pod="openstack/ovn-controller-75zw8-config-fqw79" Dec 10 13:12:59 crc kubenswrapper[4921]: I1210 13:12:59.615526 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/c1d6fc5c-8d39-4caa-bf7b-f42f07edaebe-var-run\") pod \"ovn-controller-75zw8-config-fqw79\" (UID: \"c1d6fc5c-8d39-4caa-bf7b-f42f07edaebe\") " pod="openstack/ovn-controller-75zw8-config-fqw79" Dec 10 13:12:59 crc kubenswrapper[4921]: I1210 13:12:59.615567 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c1d6fc5c-8d39-4caa-bf7b-f42f07edaebe-scripts\") pod 
\"ovn-controller-75zw8-config-fqw79\" (UID: \"c1d6fc5c-8d39-4caa-bf7b-f42f07edaebe\") " pod="openstack/ovn-controller-75zw8-config-fqw79" Dec 10 13:12:59 crc kubenswrapper[4921]: I1210 13:12:59.615767 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5s5nl\" (UniqueName: \"kubernetes.io/projected/c1d6fc5c-8d39-4caa-bf7b-f42f07edaebe-kube-api-access-5s5nl\") pod \"ovn-controller-75zw8-config-fqw79\" (UID: \"c1d6fc5c-8d39-4caa-bf7b-f42f07edaebe\") " pod="openstack/ovn-controller-75zw8-config-fqw79" Dec 10 13:12:59 crc kubenswrapper[4921]: I1210 13:12:59.615846 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/c1d6fc5c-8d39-4caa-bf7b-f42f07edaebe-additional-scripts\") pod \"ovn-controller-75zw8-config-fqw79\" (UID: \"c1d6fc5c-8d39-4caa-bf7b-f42f07edaebe\") " pod="openstack/ovn-controller-75zw8-config-fqw79" Dec 10 13:12:59 crc kubenswrapper[4921]: I1210 13:12:59.616609 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/c1d6fc5c-8d39-4caa-bf7b-f42f07edaebe-additional-scripts\") pod \"ovn-controller-75zw8-config-fqw79\" (UID: \"c1d6fc5c-8d39-4caa-bf7b-f42f07edaebe\") " pod="openstack/ovn-controller-75zw8-config-fqw79" Dec 10 13:12:59 crc kubenswrapper[4921]: I1210 13:12:59.617941 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c1d6fc5c-8d39-4caa-bf7b-f42f07edaebe-scripts\") pod \"ovn-controller-75zw8-config-fqw79\" (UID: \"c1d6fc5c-8d39-4caa-bf7b-f42f07edaebe\") " pod="openstack/ovn-controller-75zw8-config-fqw79" Dec 10 13:12:59 crc kubenswrapper[4921]: I1210 13:12:59.637051 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5s5nl\" (UniqueName: \"kubernetes.io/projected/c1d6fc5c-8d39-4caa-bf7b-f42f07edaebe-kube-api-access-5s5nl\") pod \"ovn-controller-75zw8-config-fqw79\" (UID: \"c1d6fc5c-8d39-4caa-bf7b-f42f07edaebe\") " pod="openstack/ovn-controller-75zw8-config-fqw79" Dec 10 13:12:59 crc kubenswrapper[4921]: I1210 13:12:59.739583 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-75zw8-config-fqw79" Dec 10 13:12:59 crc kubenswrapper[4921]: I1210 13:12:59.758753 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-zc8hg"] Dec 10 13:12:59 crc kubenswrapper[4921]: I1210 13:12:59.759889 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-zc8hg" Dec 10 13:12:59 crc kubenswrapper[4921]: I1210 13:12:59.763681 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data" Dec 10 13:12:59 crc kubenswrapper[4921]: I1210 13:12:59.763862 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-dfb4r" Dec 10 13:12:59 crc kubenswrapper[4921]: I1210 13:12:59.778819 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-zc8hg"] Dec 10 13:12:59 crc kubenswrapper[4921]: I1210 13:12:59.818258 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/99af9def-7de6-4bab-98f9-890433c3836e-combined-ca-bundle\") pod \"glance-db-sync-zc8hg\" (UID: \"99af9def-7de6-4bab-98f9-890433c3836e\") " pod="openstack/glance-db-sync-zc8hg" Dec 10 13:12:59 crc kubenswrapper[4921]: I1210 13:12:59.818614 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/99af9def-7de6-4bab-98f9-890433c3836e-config-data\") pod \"glance-db-sync-zc8hg\" (UID: \"99af9def-7de6-4bab-98f9-890433c3836e\") " pod="openstack/glance-db-sync-zc8hg" Dec 10 13:12:59 crc kubenswrapper[4921]: I1210 13:12:59.818792 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/99af9def-7de6-4bab-98f9-890433c3836e-db-sync-config-data\") pod \"glance-db-sync-zc8hg\" (UID: \"99af9def-7de6-4bab-98f9-890433c3836e\") " pod="openstack/glance-db-sync-zc8hg" Dec 10 13:12:59 crc kubenswrapper[4921]: I1210 13:12:59.819057 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j2c8m\" (UniqueName: \"kubernetes.io/projected/99af9def-7de6-4bab-98f9-890433c3836e-kube-api-access-j2c8m\") pod \"glance-db-sync-zc8hg\" (UID: \"99af9def-7de6-4bab-98f9-890433c3836e\") " pod="openstack/glance-db-sync-zc8hg" Dec 10 13:12:59 crc kubenswrapper[4921]: I1210 13:12:59.920763 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/99af9def-7de6-4bab-98f9-890433c3836e-db-sync-config-data\") pod \"glance-db-sync-zc8hg\" (UID: \"99af9def-7de6-4bab-98f9-890433c3836e\") " pod="openstack/glance-db-sync-zc8hg" Dec 10 13:12:59 crc kubenswrapper[4921]: I1210 13:12:59.921260 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j2c8m\" (UniqueName: \"kubernetes.io/projected/99af9def-7de6-4bab-98f9-890433c3836e-kube-api-access-j2c8m\") pod \"glance-db-sync-zc8hg\" (UID: \"99af9def-7de6-4bab-98f9-890433c3836e\") " pod="openstack/glance-db-sync-zc8hg" Dec 10 13:12:59 crc kubenswrapper[4921]: I1210 13:12:59.921287 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/99af9def-7de6-4bab-98f9-890433c3836e-combined-ca-bundle\") pod \"glance-db-sync-zc8hg\" (UID: \"99af9def-7de6-4bab-98f9-890433c3836e\") " pod="openstack/glance-db-sync-zc8hg" Dec 10 13:12:59 crc kubenswrapper[4921]: I1210 13:12:59.921310 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/99af9def-7de6-4bab-98f9-890433c3836e-config-data\") pod 
\"glance-db-sync-zc8hg\" (UID: \"99af9def-7de6-4bab-98f9-890433c3836e\") " pod="openstack/glance-db-sync-zc8hg" Dec 10 13:12:59 crc kubenswrapper[4921]: I1210 13:12:59.927978 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/99af9def-7de6-4bab-98f9-890433c3836e-db-sync-config-data\") pod \"glance-db-sync-zc8hg\" (UID: \"99af9def-7de6-4bab-98f9-890433c3836e\") " pod="openstack/glance-db-sync-zc8hg" Dec 10 13:12:59 crc kubenswrapper[4921]: I1210 13:12:59.927993 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/99af9def-7de6-4bab-98f9-890433c3836e-config-data\") pod \"glance-db-sync-zc8hg\" (UID: \"99af9def-7de6-4bab-98f9-890433c3836e\") " pod="openstack/glance-db-sync-zc8hg" Dec 10 13:12:59 crc kubenswrapper[4921]: I1210 13:12:59.930337 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/99af9def-7de6-4bab-98f9-890433c3836e-combined-ca-bundle\") pod \"glance-db-sync-zc8hg\" (UID: \"99af9def-7de6-4bab-98f9-890433c3836e\") " pod="openstack/glance-db-sync-zc8hg" Dec 10 13:12:59 crc kubenswrapper[4921]: I1210 13:12:59.950016 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j2c8m\" (UniqueName: \"kubernetes.io/projected/99af9def-7de6-4bab-98f9-890433c3836e-kube-api-access-j2c8m\") pod \"glance-db-sync-zc8hg\" (UID: \"99af9def-7de6-4bab-98f9-890433c3836e\") " pod="openstack/glance-db-sync-zc8hg" Dec 10 13:13:00 crc kubenswrapper[4921]: W1210 13:13:00.053272 4921 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc1d6fc5c_8d39_4caa_bf7b_f42f07edaebe.slice/crio-cba157a01dc88fcd6afe2991984ee361f8bfc512dab314e1b0f10269df9f8d2e WatchSource:0}: Error finding container cba157a01dc88fcd6afe2991984ee361f8bfc512dab314e1b0f10269df9f8d2e: Status 404 returned error can't find the container with id cba157a01dc88fcd6afe2991984ee361f8bfc512dab314e1b0f10269df9f8d2e Dec 10 13:13:00 crc kubenswrapper[4921]: I1210 13:13:00.059609 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-75zw8-config-fqw79"] Dec 10 13:13:00 crc kubenswrapper[4921]: I1210 13:13:00.149736 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-zc8hg" Dec 10 13:13:00 crc kubenswrapper[4921]: I1210 13:13:00.437663 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0" Dec 10 13:13:00 crc kubenswrapper[4921]: I1210 13:13:00.728463 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-zc8hg"] Dec 10 13:13:00 crc kubenswrapper[4921]: W1210 13:13:00.730931 4921 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod99af9def_7de6_4bab_98f9_890433c3836e.slice/crio-51af92cc1ee02978900c55bd1a1b09a7453c2a2f4a3bd5317dec9b50e71697fa WatchSource:0}: Error finding container 51af92cc1ee02978900c55bd1a1b09a7453c2a2f4a3bd5317dec9b50e71697fa: Status 404 returned error can't find the container with id 51af92cc1ee02978900c55bd1a1b09a7453c2a2f4a3bd5317dec9b50e71697fa Dec 10 13:13:00 crc kubenswrapper[4921]: I1210 13:13:00.853347 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-zc8hg" event={"ID":"99af9def-7de6-4bab-98f9-890433c3836e","Type":"ContainerStarted","Data":"51af92cc1ee02978900c55bd1a1b09a7453c2a2f4a3bd5317dec9b50e71697fa"} Dec 10 13:13:00 crc kubenswrapper[4921]: I1210 13:13:00.855278 4921 generic.go:334] "Generic (PLEG): container finished" podID="c1d6fc5c-8d39-4caa-bf7b-f42f07edaebe" containerID="c55ad0a6c9269f9e508c276faf37d1bc329a34437f659118e593a633b057a28c" exitCode=0 Dec 10 13:13:00 crc kubenswrapper[4921]: I1210 13:13:00.855347 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-75zw8-config-fqw79" event={"ID":"c1d6fc5c-8d39-4caa-bf7b-f42f07edaebe","Type":"ContainerDied","Data":"c55ad0a6c9269f9e508c276faf37d1bc329a34437f659118e593a633b057a28c"} Dec 10 13:13:00 crc kubenswrapper[4921]: I1210 13:13:00.855479 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-75zw8-config-fqw79" event={"ID":"c1d6fc5c-8d39-4caa-bf7b-f42f07edaebe","Type":"ContainerStarted","Data":"cba157a01dc88fcd6afe2991984ee361f8bfc512dab314e1b0f10269df9f8d2e"} Dec 10 13:13:01 crc kubenswrapper[4921]: I1210 13:13:01.869428 4921 generic.go:334] "Generic (PLEG): container finished" podID="e098cd5a-992f-42a0-a89e-d8dd59dbbcc5" containerID="be2e3565a694652e9fc8296be0cbb14d87a944c9148a1b7742d5d0c31754e2a9" exitCode=0 Dec 10 13:13:01 crc kubenswrapper[4921]: I1210 13:13:01.869704 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"e098cd5a-992f-42a0-a89e-d8dd59dbbcc5","Type":"ContainerDied","Data":"be2e3565a694652e9fc8296be0cbb14d87a944c9148a1b7742d5d0c31754e2a9"} Dec 10 13:13:02 crc kubenswrapper[4921]: I1210 13:13:02.205055 4921 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-75zw8-config-fqw79" Dec 10 13:13:02 crc kubenswrapper[4921]: I1210 13:13:02.371915 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/c1d6fc5c-8d39-4caa-bf7b-f42f07edaebe-var-run-ovn\") pod \"c1d6fc5c-8d39-4caa-bf7b-f42f07edaebe\" (UID: \"c1d6fc5c-8d39-4caa-bf7b-f42f07edaebe\") " Dec 10 13:13:02 crc kubenswrapper[4921]: I1210 13:13:02.372284 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/c1d6fc5c-8d39-4caa-bf7b-f42f07edaebe-additional-scripts\") pod \"c1d6fc5c-8d39-4caa-bf7b-f42f07edaebe\" (UID: \"c1d6fc5c-8d39-4caa-bf7b-f42f07edaebe\") " Dec 10 13:13:02 crc kubenswrapper[4921]: I1210 13:13:02.372094 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c1d6fc5c-8d39-4caa-bf7b-f42f07edaebe-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "c1d6fc5c-8d39-4caa-bf7b-f42f07edaebe" (UID: "c1d6fc5c-8d39-4caa-bf7b-f42f07edaebe"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 13:13:02 crc kubenswrapper[4921]: I1210 13:13:02.372408 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/c1d6fc5c-8d39-4caa-bf7b-f42f07edaebe-var-run\") pod \"c1d6fc5c-8d39-4caa-bf7b-f42f07edaebe\" (UID: \"c1d6fc5c-8d39-4caa-bf7b-f42f07edaebe\") " Dec 10 13:13:02 crc kubenswrapper[4921]: I1210 13:13:02.372577 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c1d6fc5c-8d39-4caa-bf7b-f42f07edaebe-var-run" (OuterVolumeSpecName: "var-run") pod "c1d6fc5c-8d39-4caa-bf7b-f42f07edaebe" (UID: "c1d6fc5c-8d39-4caa-bf7b-f42f07edaebe"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 13:13:02 crc kubenswrapper[4921]: I1210 13:13:02.372907 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/c1d6fc5c-8d39-4caa-bf7b-f42f07edaebe-var-log-ovn\") pod \"c1d6fc5c-8d39-4caa-bf7b-f42f07edaebe\" (UID: \"c1d6fc5c-8d39-4caa-bf7b-f42f07edaebe\") " Dec 10 13:13:02 crc kubenswrapper[4921]: I1210 13:13:02.372998 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5s5nl\" (UniqueName: \"kubernetes.io/projected/c1d6fc5c-8d39-4caa-bf7b-f42f07edaebe-kube-api-access-5s5nl\") pod \"c1d6fc5c-8d39-4caa-bf7b-f42f07edaebe\" (UID: \"c1d6fc5c-8d39-4caa-bf7b-f42f07edaebe\") " Dec 10 13:13:02 crc kubenswrapper[4921]: I1210 13:13:02.373033 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c1d6fc5c-8d39-4caa-bf7b-f42f07edaebe-scripts\") pod \"c1d6fc5c-8d39-4caa-bf7b-f42f07edaebe\" (UID: \"c1d6fc5c-8d39-4caa-bf7b-f42f07edaebe\") " Dec 10 13:13:02 crc kubenswrapper[4921]: I1210 13:13:02.373120 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c1d6fc5c-8d39-4caa-bf7b-f42f07edaebe-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "c1d6fc5c-8d39-4caa-bf7b-f42f07edaebe" (UID: "c1d6fc5c-8d39-4caa-bf7b-f42f07edaebe"). InnerVolumeSpecName "additional-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 13:13:02 crc kubenswrapper[4921]: I1210 13:13:02.373152 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c1d6fc5c-8d39-4caa-bf7b-f42f07edaebe-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "c1d6fc5c-8d39-4caa-bf7b-f42f07edaebe" (UID: "c1d6fc5c-8d39-4caa-bf7b-f42f07edaebe"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 13:13:02 crc kubenswrapper[4921]: I1210 13:13:02.373804 4921 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/c1d6fc5c-8d39-4caa-bf7b-f42f07edaebe-var-run-ovn\") on node \"crc\" DevicePath \"\"" Dec 10 13:13:02 crc kubenswrapper[4921]: I1210 13:13:02.373824 4921 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/c1d6fc5c-8d39-4caa-bf7b-f42f07edaebe-additional-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 13:13:02 crc kubenswrapper[4921]: I1210 13:13:02.373836 4921 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/c1d6fc5c-8d39-4caa-bf7b-f42f07edaebe-var-run\") on node \"crc\" DevicePath \"\"" Dec 10 13:13:02 crc kubenswrapper[4921]: I1210 13:13:02.373845 4921 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/c1d6fc5c-8d39-4caa-bf7b-f42f07edaebe-var-log-ovn\") on node \"crc\" DevicePath \"\"" Dec 10 13:13:02 crc kubenswrapper[4921]: I1210 13:13:02.373891 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c1d6fc5c-8d39-4caa-bf7b-f42f07edaebe-scripts" (OuterVolumeSpecName: "scripts") pod "c1d6fc5c-8d39-4caa-bf7b-f42f07edaebe" (UID: "c1d6fc5c-8d39-4caa-bf7b-f42f07edaebe"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 13:13:02 crc kubenswrapper[4921]: I1210 13:13:02.378554 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c1d6fc5c-8d39-4caa-bf7b-f42f07edaebe-kube-api-access-5s5nl" (OuterVolumeSpecName: "kube-api-access-5s5nl") pod "c1d6fc5c-8d39-4caa-bf7b-f42f07edaebe" (UID: "c1d6fc5c-8d39-4caa-bf7b-f42f07edaebe"). InnerVolumeSpecName "kube-api-access-5s5nl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 13:13:02 crc kubenswrapper[4921]: I1210 13:13:02.475607 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5s5nl\" (UniqueName: \"kubernetes.io/projected/c1d6fc5c-8d39-4caa-bf7b-f42f07edaebe-kube-api-access-5s5nl\") on node \"crc\" DevicePath \"\"" Dec 10 13:13:02 crc kubenswrapper[4921]: I1210 13:13:02.475640 4921 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c1d6fc5c-8d39-4caa-bf7b-f42f07edaebe-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 13:13:02 crc kubenswrapper[4921]: I1210 13:13:02.881161 4921 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-75zw8-config-fqw79" Dec 10 13:13:02 crc kubenswrapper[4921]: I1210 13:13:02.881181 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-75zw8-config-fqw79" event={"ID":"c1d6fc5c-8d39-4caa-bf7b-f42f07edaebe","Type":"ContainerDied","Data":"cba157a01dc88fcd6afe2991984ee361f8bfc512dab314e1b0f10269df9f8d2e"} Dec 10 13:13:02 crc kubenswrapper[4921]: I1210 13:13:02.881333 4921 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cba157a01dc88fcd6afe2991984ee361f8bfc512dab314e1b0f10269df9f8d2e" Dec 10 13:13:02 crc kubenswrapper[4921]: I1210 13:13:02.898640 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"e098cd5a-992f-42a0-a89e-d8dd59dbbcc5","Type":"ContainerStarted","Data":"399fc68b6786c375a7252b97a1d173f107a4d96b48bfd8d89ccc69be61078e50"} Dec 10 13:13:02 crc kubenswrapper[4921]: I1210 13:13:02.901408 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Dec 10 13:13:02 crc kubenswrapper[4921]: I1210 13:13:02.935898 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=37.223730805 podStartE2EDuration="1m23.935880353s" podCreationTimestamp="2025-12-10 13:11:39 +0000 UTC" firstStartedPulling="2025-12-10 13:11:41.383686415 +0000 UTC m=+898.599908339" lastFinishedPulling="2025-12-10 13:12:28.095835963 +0000 UTC m=+945.312057887" observedRunningTime="2025-12-10 13:13:02.933512029 +0000 UTC m=+980.149733973" watchObservedRunningTime="2025-12-10 13:13:02.935880353 +0000 UTC m=+980.152102277" Dec 10 13:13:03 crc kubenswrapper[4921]: I1210 13:13:03.324057 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-75zw8-config-fqw79"] Dec 10 13:13:03 crc kubenswrapper[4921]: I1210 13:13:03.337019 4921 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-75zw8-config-fqw79"] Dec 10 13:13:03 crc kubenswrapper[4921]: I1210 13:13:03.543211 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-75zw8-config-jjz96"] Dec 10 13:13:03 crc kubenswrapper[4921]: E1210 13:13:03.543660 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c1d6fc5c-8d39-4caa-bf7b-f42f07edaebe" containerName="ovn-config" Dec 10 13:13:03 crc kubenswrapper[4921]: I1210 13:13:03.543682 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="c1d6fc5c-8d39-4caa-bf7b-f42f07edaebe" containerName="ovn-config" Dec 10 13:13:03 crc kubenswrapper[4921]: I1210 13:13:03.543871 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="c1d6fc5c-8d39-4caa-bf7b-f42f07edaebe" containerName="ovn-config" Dec 10 13:13:03 crc kubenswrapper[4921]: I1210 13:13:03.544575 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-75zw8-config-jjz96" Dec 10 13:13:03 crc kubenswrapper[4921]: I1210 13:13:03.547646 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Dec 10 13:13:03 crc kubenswrapper[4921]: I1210 13:13:03.561293 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-75zw8-config-jjz96"] Dec 10 13:13:03 crc kubenswrapper[4921]: I1210 13:13:03.700527 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e3676e98-0f82-449a-9483-4ff26a70038f-scripts\") pod \"ovn-controller-75zw8-config-jjz96\" (UID: \"e3676e98-0f82-449a-9483-4ff26a70038f\") " pod="openstack/ovn-controller-75zw8-config-jjz96" Dec 10 13:13:03 crc kubenswrapper[4921]: I1210 13:13:03.700611 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/e3676e98-0f82-449a-9483-4ff26a70038f-var-log-ovn\") pod \"ovn-controller-75zw8-config-jjz96\" (UID: \"e3676e98-0f82-449a-9483-4ff26a70038f\") " pod="openstack/ovn-controller-75zw8-config-jjz96" Dec 10 13:13:03 crc kubenswrapper[4921]: I1210 13:13:03.700656 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l4gx7\" (UniqueName: \"kubernetes.io/projected/e3676e98-0f82-449a-9483-4ff26a70038f-kube-api-access-l4gx7\") pod \"ovn-controller-75zw8-config-jjz96\" (UID: \"e3676e98-0f82-449a-9483-4ff26a70038f\") " pod="openstack/ovn-controller-75zw8-config-jjz96" Dec 10 13:13:03 crc kubenswrapper[4921]: I1210 13:13:03.700713 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/e3676e98-0f82-449a-9483-4ff26a70038f-var-run-ovn\") pod \"ovn-controller-75zw8-config-jjz96\" (UID: \"e3676e98-0f82-449a-9483-4ff26a70038f\") " pod="openstack/ovn-controller-75zw8-config-jjz96" Dec 10 13:13:03 crc kubenswrapper[4921]: I1210 13:13:03.700740 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/e3676e98-0f82-449a-9483-4ff26a70038f-var-run\") pod \"ovn-controller-75zw8-config-jjz96\" (UID: \"e3676e98-0f82-449a-9483-4ff26a70038f\") " pod="openstack/ovn-controller-75zw8-config-jjz96" Dec 10 13:13:03 crc kubenswrapper[4921]: I1210 13:13:03.700808 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/e3676e98-0f82-449a-9483-4ff26a70038f-additional-scripts\") pod \"ovn-controller-75zw8-config-jjz96\" (UID: \"e3676e98-0f82-449a-9483-4ff26a70038f\") " pod="openstack/ovn-controller-75zw8-config-jjz96" Dec 10 13:13:03 crc kubenswrapper[4921]: I1210 13:13:03.801880 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e3676e98-0f82-449a-9483-4ff26a70038f-scripts\") pod \"ovn-controller-75zw8-config-jjz96\" (UID: \"e3676e98-0f82-449a-9483-4ff26a70038f\") " pod="openstack/ovn-controller-75zw8-config-jjz96" Dec 10 13:13:03 crc kubenswrapper[4921]: I1210 13:13:03.801942 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/e3676e98-0f82-449a-9483-4ff26a70038f-var-log-ovn\") pod 
\"ovn-controller-75zw8-config-jjz96\" (UID: \"e3676e98-0f82-449a-9483-4ff26a70038f\") " pod="openstack/ovn-controller-75zw8-config-jjz96" Dec 10 13:13:03 crc kubenswrapper[4921]: I1210 13:13:03.801975 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l4gx7\" (UniqueName: \"kubernetes.io/projected/e3676e98-0f82-449a-9483-4ff26a70038f-kube-api-access-l4gx7\") pod \"ovn-controller-75zw8-config-jjz96\" (UID: \"e3676e98-0f82-449a-9483-4ff26a70038f\") " pod="openstack/ovn-controller-75zw8-config-jjz96" Dec 10 13:13:03 crc kubenswrapper[4921]: I1210 13:13:03.802033 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/e3676e98-0f82-449a-9483-4ff26a70038f-var-run-ovn\") pod \"ovn-controller-75zw8-config-jjz96\" (UID: \"e3676e98-0f82-449a-9483-4ff26a70038f\") " pod="openstack/ovn-controller-75zw8-config-jjz96" Dec 10 13:13:03 crc kubenswrapper[4921]: I1210 13:13:03.802054 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/e3676e98-0f82-449a-9483-4ff26a70038f-var-run\") pod \"ovn-controller-75zw8-config-jjz96\" (UID: \"e3676e98-0f82-449a-9483-4ff26a70038f\") " pod="openstack/ovn-controller-75zw8-config-jjz96" Dec 10 13:13:03 crc kubenswrapper[4921]: I1210 13:13:03.802106 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/e3676e98-0f82-449a-9483-4ff26a70038f-additional-scripts\") pod \"ovn-controller-75zw8-config-jjz96\" (UID: \"e3676e98-0f82-449a-9483-4ff26a70038f\") " pod="openstack/ovn-controller-75zw8-config-jjz96" Dec 10 13:13:03 crc kubenswrapper[4921]: I1210 13:13:03.802736 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/e3676e98-0f82-449a-9483-4ff26a70038f-var-log-ovn\") pod \"ovn-controller-75zw8-config-jjz96\" (UID: \"e3676e98-0f82-449a-9483-4ff26a70038f\") " pod="openstack/ovn-controller-75zw8-config-jjz96" Dec 10 13:13:03 crc kubenswrapper[4921]: I1210 13:13:03.802746 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/e3676e98-0f82-449a-9483-4ff26a70038f-var-run\") pod \"ovn-controller-75zw8-config-jjz96\" (UID: \"e3676e98-0f82-449a-9483-4ff26a70038f\") " pod="openstack/ovn-controller-75zw8-config-jjz96" Dec 10 13:13:03 crc kubenswrapper[4921]: I1210 13:13:03.802758 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/e3676e98-0f82-449a-9483-4ff26a70038f-var-run-ovn\") pod \"ovn-controller-75zw8-config-jjz96\" (UID: \"e3676e98-0f82-449a-9483-4ff26a70038f\") " pod="openstack/ovn-controller-75zw8-config-jjz96" Dec 10 13:13:03 crc kubenswrapper[4921]: I1210 13:13:03.802848 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/e3676e98-0f82-449a-9483-4ff26a70038f-additional-scripts\") pod \"ovn-controller-75zw8-config-jjz96\" (UID: \"e3676e98-0f82-449a-9483-4ff26a70038f\") " pod="openstack/ovn-controller-75zw8-config-jjz96" Dec 10 13:13:03 crc kubenswrapper[4921]: I1210 13:13:03.806681 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e3676e98-0f82-449a-9483-4ff26a70038f-scripts\") pod 
\"ovn-controller-75zw8-config-jjz96\" (UID: \"e3676e98-0f82-449a-9483-4ff26a70038f\") " pod="openstack/ovn-controller-75zw8-config-jjz96" Dec 10 13:13:03 crc kubenswrapper[4921]: I1210 13:13:03.820167 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l4gx7\" (UniqueName: \"kubernetes.io/projected/e3676e98-0f82-449a-9483-4ff26a70038f-kube-api-access-l4gx7\") pod \"ovn-controller-75zw8-config-jjz96\" (UID: \"e3676e98-0f82-449a-9483-4ff26a70038f\") " pod="openstack/ovn-controller-75zw8-config-jjz96" Dec 10 13:13:03 crc kubenswrapper[4921]: I1210 13:13:03.866785 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-75zw8-config-jjz96" Dec 10 13:13:04 crc kubenswrapper[4921]: I1210 13:13:04.120869 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-75zw8" Dec 10 13:13:04 crc kubenswrapper[4921]: I1210 13:13:04.266359 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-75zw8-config-jjz96"] Dec 10 13:13:04 crc kubenswrapper[4921]: W1210 13:13:04.299172 4921 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode3676e98_0f82_449a_9483_4ff26a70038f.slice/crio-b2e6a078b950817f6153bde2589f5032a8cd10b847ae0ed4633c47e8c79ce170 WatchSource:0}: Error finding container b2e6a078b950817f6153bde2589f5032a8cd10b847ae0ed4633c47e8c79ce170: Status 404 returned error can't find the container with id b2e6a078b950817f6153bde2589f5032a8cd10b847ae0ed4633c47e8c79ce170 Dec 10 13:13:04 crc kubenswrapper[4921]: I1210 13:13:04.931173 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-75zw8-config-jjz96" event={"ID":"e3676e98-0f82-449a-9483-4ff26a70038f","Type":"ContainerStarted","Data":"dc80fc09ebb77d2928c25b3aeb2b8dcb8c4dabe8ca7b77ac7536afb9d50ba60e"} Dec 10 13:13:04 crc kubenswrapper[4921]: I1210 13:13:04.931615 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-75zw8-config-jjz96" event={"ID":"e3676e98-0f82-449a-9483-4ff26a70038f","Type":"ContainerStarted","Data":"b2e6a078b950817f6153bde2589f5032a8cd10b847ae0ed4633c47e8c79ce170"} Dec 10 13:13:04 crc kubenswrapper[4921]: I1210 13:13:04.954442 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-75zw8-config-jjz96" podStartSLOduration=1.9544196440000001 podStartE2EDuration="1.954419644s" podCreationTimestamp="2025-12-10 13:13:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 13:13:04.94830375 +0000 UTC m=+982.164525694" watchObservedRunningTime="2025-12-10 13:13:04.954419644 +0000 UTC m=+982.170641578" Dec 10 13:13:05 crc kubenswrapper[4921]: I1210 13:13:05.213443 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c1d6fc5c-8d39-4caa-bf7b-f42f07edaebe" path="/var/lib/kubelet/pods/c1d6fc5c-8d39-4caa-bf7b-f42f07edaebe/volumes" Dec 10 13:13:05 crc kubenswrapper[4921]: I1210 13:13:05.952285 4921 generic.go:334] "Generic (PLEG): container finished" podID="e3676e98-0f82-449a-9483-4ff26a70038f" containerID="dc80fc09ebb77d2928c25b3aeb2b8dcb8c4dabe8ca7b77ac7536afb9d50ba60e" exitCode=0 Dec 10 13:13:05 crc kubenswrapper[4921]: I1210 13:13:05.952628 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-75zw8-config-jjz96" 
event={"ID":"e3676e98-0f82-449a-9483-4ff26a70038f","Type":"ContainerDied","Data":"dc80fc09ebb77d2928c25b3aeb2b8dcb8c4dabe8ca7b77ac7536afb9d50ba60e"} Dec 10 13:13:14 crc kubenswrapper[4921]: I1210 13:13:14.020132 4921 generic.go:334] "Generic (PLEG): container finished" podID="c651083f-4dd3-4963-892f-ddbc5ef1af05" containerID="ceab0d7e68ee169e4cbcf90d582662eb65c586d8bb19ae2df9006de3739acd44" exitCode=0 Dec 10 13:13:14 crc kubenswrapper[4921]: I1210 13:13:14.020313 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"c651083f-4dd3-4963-892f-ddbc5ef1af05","Type":"ContainerDied","Data":"ceab0d7e68ee169e4cbcf90d582662eb65c586d8bb19ae2df9006de3739acd44"} Dec 10 13:13:16 crc kubenswrapper[4921]: E1210 13:13:16.577191 4921 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-glance-api:current-podified" Dec 10 13:13:16 crc kubenswrapper[4921]: E1210 13:13:16.578115 4921 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:glance-db-sync,Image:quay.io/podified-antelope-centos9/openstack-glance-api:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:true,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/glance/glance.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-j2c8m,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42415,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42415,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod glance-db-sync-zc8hg_openstack(99af9def-7de6-4bab-98f9-890433c3836e): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 10 13:13:16 crc kubenswrapper[4921]: E1210 13:13:16.579650 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"glance-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context 
canceled\"" pod="openstack/glance-db-sync-zc8hg" podUID="99af9def-7de6-4bab-98f9-890433c3836e" Dec 10 13:13:16 crc kubenswrapper[4921]: I1210 13:13:16.674149 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-75zw8-config-jjz96" Dec 10 13:13:16 crc kubenswrapper[4921]: I1210 13:13:16.800355 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/e3676e98-0f82-449a-9483-4ff26a70038f-var-run\") pod \"e3676e98-0f82-449a-9483-4ff26a70038f\" (UID: \"e3676e98-0f82-449a-9483-4ff26a70038f\") " Dec 10 13:13:16 crc kubenswrapper[4921]: I1210 13:13:16.801043 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l4gx7\" (UniqueName: \"kubernetes.io/projected/e3676e98-0f82-449a-9483-4ff26a70038f-kube-api-access-l4gx7\") pod \"e3676e98-0f82-449a-9483-4ff26a70038f\" (UID: \"e3676e98-0f82-449a-9483-4ff26a70038f\") " Dec 10 13:13:16 crc kubenswrapper[4921]: I1210 13:13:16.801074 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/e3676e98-0f82-449a-9483-4ff26a70038f-additional-scripts\") pod \"e3676e98-0f82-449a-9483-4ff26a70038f\" (UID: \"e3676e98-0f82-449a-9483-4ff26a70038f\") " Dec 10 13:13:16 crc kubenswrapper[4921]: I1210 13:13:16.800992 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e3676e98-0f82-449a-9483-4ff26a70038f-var-run" (OuterVolumeSpecName: "var-run") pod "e3676e98-0f82-449a-9483-4ff26a70038f" (UID: "e3676e98-0f82-449a-9483-4ff26a70038f"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 13:13:16 crc kubenswrapper[4921]: I1210 13:13:16.801164 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/e3676e98-0f82-449a-9483-4ff26a70038f-var-log-ovn\") pod \"e3676e98-0f82-449a-9483-4ff26a70038f\" (UID: \"e3676e98-0f82-449a-9483-4ff26a70038f\") " Dec 10 13:13:16 crc kubenswrapper[4921]: I1210 13:13:16.801256 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e3676e98-0f82-449a-9483-4ff26a70038f-scripts\") pod \"e3676e98-0f82-449a-9483-4ff26a70038f\" (UID: \"e3676e98-0f82-449a-9483-4ff26a70038f\") " Dec 10 13:13:16 crc kubenswrapper[4921]: I1210 13:13:16.801338 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/e3676e98-0f82-449a-9483-4ff26a70038f-var-run-ovn\") pod \"e3676e98-0f82-449a-9483-4ff26a70038f\" (UID: \"e3676e98-0f82-449a-9483-4ff26a70038f\") " Dec 10 13:13:16 crc kubenswrapper[4921]: I1210 13:13:16.801740 4921 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/e3676e98-0f82-449a-9483-4ff26a70038f-var-run\") on node \"crc\" DevicePath \"\"" Dec 10 13:13:16 crc kubenswrapper[4921]: I1210 13:13:16.801800 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e3676e98-0f82-449a-9483-4ff26a70038f-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "e3676e98-0f82-449a-9483-4ff26a70038f" (UID: "e3676e98-0f82-449a-9483-4ff26a70038f"). InnerVolumeSpecName "var-run-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 13:13:16 crc kubenswrapper[4921]: I1210 13:13:16.801858 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e3676e98-0f82-449a-9483-4ff26a70038f-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "e3676e98-0f82-449a-9483-4ff26a70038f" (UID: "e3676e98-0f82-449a-9483-4ff26a70038f"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 13:13:16 crc kubenswrapper[4921]: I1210 13:13:16.802449 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e3676e98-0f82-449a-9483-4ff26a70038f-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "e3676e98-0f82-449a-9483-4ff26a70038f" (UID: "e3676e98-0f82-449a-9483-4ff26a70038f"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 13:13:16 crc kubenswrapper[4921]: I1210 13:13:16.802718 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e3676e98-0f82-449a-9483-4ff26a70038f-scripts" (OuterVolumeSpecName: "scripts") pod "e3676e98-0f82-449a-9483-4ff26a70038f" (UID: "e3676e98-0f82-449a-9483-4ff26a70038f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 13:13:16 crc kubenswrapper[4921]: I1210 13:13:16.810414 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e3676e98-0f82-449a-9483-4ff26a70038f-kube-api-access-l4gx7" (OuterVolumeSpecName: "kube-api-access-l4gx7") pod "e3676e98-0f82-449a-9483-4ff26a70038f" (UID: "e3676e98-0f82-449a-9483-4ff26a70038f"). InnerVolumeSpecName "kube-api-access-l4gx7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 13:13:16 crc kubenswrapper[4921]: I1210 13:13:16.904371 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l4gx7\" (UniqueName: \"kubernetes.io/projected/e3676e98-0f82-449a-9483-4ff26a70038f-kube-api-access-l4gx7\") on node \"crc\" DevicePath \"\"" Dec 10 13:13:16 crc kubenswrapper[4921]: I1210 13:13:16.904475 4921 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/e3676e98-0f82-449a-9483-4ff26a70038f-additional-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 13:13:16 crc kubenswrapper[4921]: I1210 13:13:16.904489 4921 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/e3676e98-0f82-449a-9483-4ff26a70038f-var-log-ovn\") on node \"crc\" DevicePath \"\"" Dec 10 13:13:16 crc kubenswrapper[4921]: I1210 13:13:16.904499 4921 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e3676e98-0f82-449a-9483-4ff26a70038f-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 13:13:16 crc kubenswrapper[4921]: I1210 13:13:16.904508 4921 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/e3676e98-0f82-449a-9483-4ff26a70038f-var-run-ovn\") on node \"crc\" DevicePath \"\"" Dec 10 13:13:17 crc kubenswrapper[4921]: I1210 13:13:17.051757 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"c651083f-4dd3-4963-892f-ddbc5ef1af05","Type":"ContainerStarted","Data":"b183c5c69fa2e6cff7dd5a97eb591e8d23a80510f008b8368fa1c71b8a19f60e"} Dec 10 13:13:17 crc kubenswrapper[4921]: I1210 13:13:17.052256 4921 kubelet.go:2542] 
"SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Dec 10 13:13:17 crc kubenswrapper[4921]: I1210 13:13:17.055260 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-75zw8-config-jjz96" event={"ID":"e3676e98-0f82-449a-9483-4ff26a70038f","Type":"ContainerDied","Data":"b2e6a078b950817f6153bde2589f5032a8cd10b847ae0ed4633c47e8c79ce170"} Dec 10 13:13:17 crc kubenswrapper[4921]: I1210 13:13:17.055292 4921 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b2e6a078b950817f6153bde2589f5032a8cd10b847ae0ed4633c47e8c79ce170" Dec 10 13:13:17 crc kubenswrapper[4921]: E1210 13:13:17.056000 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"glance-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-glance-api:current-podified\\\"\"" pod="openstack/glance-db-sync-zc8hg" podUID="99af9def-7de6-4bab-98f9-890433c3836e" Dec 10 13:13:17 crc kubenswrapper[4921]: I1210 13:13:17.056834 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-75zw8-config-jjz96" Dec 10 13:13:17 crc kubenswrapper[4921]: I1210 13:13:17.090319 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=-9223371938.764486 podStartE2EDuration="1m38.090289789s" podCreationTimestamp="2025-12-10 13:11:39 +0000 UTC" firstStartedPulling="2025-12-10 13:11:42.185507832 +0000 UTC m=+899.401729756" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 13:13:17.079337075 +0000 UTC m=+994.295558999" watchObservedRunningTime="2025-12-10 13:13:17.090289789 +0000 UTC m=+994.306511713" Dec 10 13:13:17 crc kubenswrapper[4921]: I1210 13:13:17.765737 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-75zw8-config-jjz96"] Dec 10 13:13:17 crc kubenswrapper[4921]: I1210 13:13:17.779368 4921 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-75zw8-config-jjz96"] Dec 10 13:13:19 crc kubenswrapper[4921]: I1210 13:13:19.213225 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e3676e98-0f82-449a-9483-4ff26a70038f" path="/var/lib/kubelet/pods/e3676e98-0f82-449a-9483-4ff26a70038f/volumes" Dec 10 13:13:20 crc kubenswrapper[4921]: I1210 13:13:20.589661 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Dec 10 13:13:31 crc kubenswrapper[4921]: I1210 13:13:31.217845 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-zc8hg" event={"ID":"99af9def-7de6-4bab-98f9-890433c3836e","Type":"ContainerStarted","Data":"b4b4a895c1597e20c0953747b4ee7a22b38b75c00abb3664de38febf2b0c1542"} Dec 10 13:13:31 crc kubenswrapper[4921]: I1210 13:13:31.238053 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-zc8hg" podStartSLOduration=3.20025821 podStartE2EDuration="32.238030255s" podCreationTimestamp="2025-12-10 13:12:59 +0000 UTC" firstStartedPulling="2025-12-10 13:13:00.733095585 +0000 UTC m=+977.949317509" lastFinishedPulling="2025-12-10 13:13:29.77086761 +0000 UTC m=+1006.987089554" observedRunningTime="2025-12-10 13:13:31.234782128 +0000 UTC m=+1008.451004062" watchObservedRunningTime="2025-12-10 13:13:31.238030255 +0000 UTC m=+1008.454252179" Dec 10 13:13:31 crc kubenswrapper[4921]: I1210 13:13:31.283578 4921 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Dec 10 13:13:31 crc kubenswrapper[4921]: I1210 13:13:31.723649 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-mgjsp"] Dec 10 13:13:31 crc kubenswrapper[4921]: E1210 13:13:31.724253 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e3676e98-0f82-449a-9483-4ff26a70038f" containerName="ovn-config" Dec 10 13:13:31 crc kubenswrapper[4921]: I1210 13:13:31.724265 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="e3676e98-0f82-449a-9483-4ff26a70038f" containerName="ovn-config" Dec 10 13:13:31 crc kubenswrapper[4921]: I1210 13:13:31.724458 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="e3676e98-0f82-449a-9483-4ff26a70038f" containerName="ovn-config" Dec 10 13:13:31 crc kubenswrapper[4921]: I1210 13:13:31.724978 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-mgjsp" Dec 10 13:13:31 crc kubenswrapper[4921]: I1210 13:13:31.740971 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-3ad1-account-create-update-mvnfv"] Dec 10 13:13:31 crc kubenswrapper[4921]: I1210 13:13:31.741952 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-3ad1-account-create-update-mvnfv" Dec 10 13:13:31 crc kubenswrapper[4921]: I1210 13:13:31.746896 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Dec 10 13:13:31 crc kubenswrapper[4921]: I1210 13:13:31.749655 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-mgjsp"] Dec 10 13:13:31 crc kubenswrapper[4921]: I1210 13:13:31.757003 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-3ad1-account-create-update-mvnfv"] Dec 10 13:13:31 crc kubenswrapper[4921]: I1210 13:13:31.822071 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-kkx6d"] Dec 10 13:13:31 crc kubenswrapper[4921]: I1210 13:13:31.823233 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-kkx6d" Dec 10 13:13:31 crc kubenswrapper[4921]: I1210 13:13:31.834859 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-kkx6d"] Dec 10 13:13:31 crc kubenswrapper[4921]: I1210 13:13:31.842853 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-526f-account-create-update-76g79"] Dec 10 13:13:31 crc kubenswrapper[4921]: I1210 13:13:31.844187 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-526f-account-create-update-76g79" Dec 10 13:13:31 crc kubenswrapper[4921]: I1210 13:13:31.846283 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Dec 10 13:13:31 crc kubenswrapper[4921]: I1210 13:13:31.867358 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2shdk\" (UniqueName: \"kubernetes.io/projected/13330318-8c10-4f9c-9e04-1c4e6005d84e-kube-api-access-2shdk\") pod \"cinder-db-create-mgjsp\" (UID: \"13330318-8c10-4f9c-9e04-1c4e6005d84e\") " pod="openstack/cinder-db-create-mgjsp" Dec 10 13:13:31 crc kubenswrapper[4921]: I1210 13:13:31.867735 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d834ef72-8c06-49b5-b966-8114d30fe9de-operator-scripts\") pod \"barbican-3ad1-account-create-update-mvnfv\" (UID: \"d834ef72-8c06-49b5-b966-8114d30fe9de\") " pod="openstack/barbican-3ad1-account-create-update-mvnfv" Dec 10 13:13:31 crc kubenswrapper[4921]: I1210 13:13:31.867778 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l5gnw\" (UniqueName: \"kubernetes.io/projected/d834ef72-8c06-49b5-b966-8114d30fe9de-kube-api-access-l5gnw\") pod \"barbican-3ad1-account-create-update-mvnfv\" (UID: \"d834ef72-8c06-49b5-b966-8114d30fe9de\") " pod="openstack/barbican-3ad1-account-create-update-mvnfv" Dec 10 13:13:31 crc kubenswrapper[4921]: I1210 13:13:31.867897 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/13330318-8c10-4f9c-9e04-1c4e6005d84e-operator-scripts\") pod \"cinder-db-create-mgjsp\" (UID: \"13330318-8c10-4f9c-9e04-1c4e6005d84e\") " pod="openstack/cinder-db-create-mgjsp" Dec 10 13:13:31 crc kubenswrapper[4921]: I1210 13:13:31.895898 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-526f-account-create-update-76g79"] Dec 10 13:13:31 crc kubenswrapper[4921]: I1210 13:13:31.969269 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9e1315d0-712b-4119-8569-304e3bcdf8e6-operator-scripts\") pod \"cinder-526f-account-create-update-76g79\" (UID: \"9e1315d0-712b-4119-8569-304e3bcdf8e6\") " pod="openstack/cinder-526f-account-create-update-76g79" Dec 10 13:13:31 crc kubenswrapper[4921]: I1210 13:13:31.969322 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2shdk\" (UniqueName: \"kubernetes.io/projected/13330318-8c10-4f9c-9e04-1c4e6005d84e-kube-api-access-2shdk\") pod \"cinder-db-create-mgjsp\" (UID: \"13330318-8c10-4f9c-9e04-1c4e6005d84e\") " pod="openstack/cinder-db-create-mgjsp" Dec 10 13:13:31 crc kubenswrapper[4921]: I1210 13:13:31.969467 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f13cffa1-660f-45b0-9207-39d7059f1ec8-operator-scripts\") pod \"barbican-db-create-kkx6d\" (UID: \"f13cffa1-660f-45b0-9207-39d7059f1ec8\") " pod="openstack/barbican-db-create-kkx6d" Dec 10 13:13:31 crc kubenswrapper[4921]: I1210 13:13:31.969533 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h2prp\" (UniqueName: 
\"kubernetes.io/projected/f13cffa1-660f-45b0-9207-39d7059f1ec8-kube-api-access-h2prp\") pod \"barbican-db-create-kkx6d\" (UID: \"f13cffa1-660f-45b0-9207-39d7059f1ec8\") " pod="openstack/barbican-db-create-kkx6d" Dec 10 13:13:31 crc kubenswrapper[4921]: I1210 13:13:31.969556 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-khgmw\" (UniqueName: \"kubernetes.io/projected/9e1315d0-712b-4119-8569-304e3bcdf8e6-kube-api-access-khgmw\") pod \"cinder-526f-account-create-update-76g79\" (UID: \"9e1315d0-712b-4119-8569-304e3bcdf8e6\") " pod="openstack/cinder-526f-account-create-update-76g79" Dec 10 13:13:31 crc kubenswrapper[4921]: I1210 13:13:31.969730 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d834ef72-8c06-49b5-b966-8114d30fe9de-operator-scripts\") pod \"barbican-3ad1-account-create-update-mvnfv\" (UID: \"d834ef72-8c06-49b5-b966-8114d30fe9de\") " pod="openstack/barbican-3ad1-account-create-update-mvnfv" Dec 10 13:13:31 crc kubenswrapper[4921]: I1210 13:13:31.969758 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l5gnw\" (UniqueName: \"kubernetes.io/projected/d834ef72-8c06-49b5-b966-8114d30fe9de-kube-api-access-l5gnw\") pod \"barbican-3ad1-account-create-update-mvnfv\" (UID: \"d834ef72-8c06-49b5-b966-8114d30fe9de\") " pod="openstack/barbican-3ad1-account-create-update-mvnfv" Dec 10 13:13:31 crc kubenswrapper[4921]: I1210 13:13:31.969829 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/13330318-8c10-4f9c-9e04-1c4e6005d84e-operator-scripts\") pod \"cinder-db-create-mgjsp\" (UID: \"13330318-8c10-4f9c-9e04-1c4e6005d84e\") " pod="openstack/cinder-db-create-mgjsp" Dec 10 13:13:31 crc kubenswrapper[4921]: I1210 13:13:31.970515 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d834ef72-8c06-49b5-b966-8114d30fe9de-operator-scripts\") pod \"barbican-3ad1-account-create-update-mvnfv\" (UID: \"d834ef72-8c06-49b5-b966-8114d30fe9de\") " pod="openstack/barbican-3ad1-account-create-update-mvnfv" Dec 10 13:13:31 crc kubenswrapper[4921]: I1210 13:13:31.970632 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/13330318-8c10-4f9c-9e04-1c4e6005d84e-operator-scripts\") pod \"cinder-db-create-mgjsp\" (UID: \"13330318-8c10-4f9c-9e04-1c4e6005d84e\") " pod="openstack/cinder-db-create-mgjsp" Dec 10 13:13:31 crc kubenswrapper[4921]: I1210 13:13:31.988265 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l5gnw\" (UniqueName: \"kubernetes.io/projected/d834ef72-8c06-49b5-b966-8114d30fe9de-kube-api-access-l5gnw\") pod \"barbican-3ad1-account-create-update-mvnfv\" (UID: \"d834ef72-8c06-49b5-b966-8114d30fe9de\") " pod="openstack/barbican-3ad1-account-create-update-mvnfv" Dec 10 13:13:31 crc kubenswrapper[4921]: I1210 13:13:31.990862 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2shdk\" (UniqueName: \"kubernetes.io/projected/13330318-8c10-4f9c-9e04-1c4e6005d84e-kube-api-access-2shdk\") pod \"cinder-db-create-mgjsp\" (UID: \"13330318-8c10-4f9c-9e04-1c4e6005d84e\") " pod="openstack/cinder-db-create-mgjsp" Dec 10 13:13:32 crc kubenswrapper[4921]: I1210 13:13:32.032278 4921 
kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-74qkt"] Dec 10 13:13:32 crc kubenswrapper[4921]: I1210 13:13:32.033293 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-74qkt" Dec 10 13:13:32 crc kubenswrapper[4921]: I1210 13:13:32.043644 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-mgjsp" Dec 10 13:13:32 crc kubenswrapper[4921]: I1210 13:13:32.056118 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-3ad1-account-create-update-mvnfv" Dec 10 13:13:32 crc kubenswrapper[4921]: I1210 13:13:32.071305 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f13cffa1-660f-45b0-9207-39d7059f1ec8-operator-scripts\") pod \"barbican-db-create-kkx6d\" (UID: \"f13cffa1-660f-45b0-9207-39d7059f1ec8\") " pod="openstack/barbican-db-create-kkx6d" Dec 10 13:13:32 crc kubenswrapper[4921]: I1210 13:13:32.071407 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h2prp\" (UniqueName: \"kubernetes.io/projected/f13cffa1-660f-45b0-9207-39d7059f1ec8-kube-api-access-h2prp\") pod \"barbican-db-create-kkx6d\" (UID: \"f13cffa1-660f-45b0-9207-39d7059f1ec8\") " pod="openstack/barbican-db-create-kkx6d" Dec 10 13:13:32 crc kubenswrapper[4921]: I1210 13:13:32.071438 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-khgmw\" (UniqueName: \"kubernetes.io/projected/9e1315d0-712b-4119-8569-304e3bcdf8e6-kube-api-access-khgmw\") pod \"cinder-526f-account-create-update-76g79\" (UID: \"9e1315d0-712b-4119-8569-304e3bcdf8e6\") " pod="openstack/cinder-526f-account-create-update-76g79" Dec 10 13:13:32 crc kubenswrapper[4921]: I1210 13:13:32.071524 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9e1315d0-712b-4119-8569-304e3bcdf8e6-operator-scripts\") pod \"cinder-526f-account-create-update-76g79\" (UID: \"9e1315d0-712b-4119-8569-304e3bcdf8e6\") " pod="openstack/cinder-526f-account-create-update-76g79" Dec 10 13:13:32 crc kubenswrapper[4921]: I1210 13:13:32.072182 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9e1315d0-712b-4119-8569-304e3bcdf8e6-operator-scripts\") pod \"cinder-526f-account-create-update-76g79\" (UID: \"9e1315d0-712b-4119-8569-304e3bcdf8e6\") " pod="openstack/cinder-526f-account-create-update-76g79" Dec 10 13:13:32 crc kubenswrapper[4921]: I1210 13:13:32.072576 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-74qkt"] Dec 10 13:13:32 crc kubenswrapper[4921]: I1210 13:13:32.072621 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f13cffa1-660f-45b0-9207-39d7059f1ec8-operator-scripts\") pod \"barbican-db-create-kkx6d\" (UID: \"f13cffa1-660f-45b0-9207-39d7059f1ec8\") " pod="openstack/barbican-db-create-kkx6d" Dec 10 13:13:32 crc kubenswrapper[4921]: I1210 13:13:32.133891 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-5836-account-create-update-98fk7"] Dec 10 13:13:32 crc kubenswrapper[4921]: I1210 13:13:32.145446 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-5836-account-create-update-98fk7" Dec 10 13:13:32 crc kubenswrapper[4921]: I1210 13:13:32.151357 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Dec 10 13:13:32 crc kubenswrapper[4921]: I1210 13:13:32.156900 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h2prp\" (UniqueName: \"kubernetes.io/projected/f13cffa1-660f-45b0-9207-39d7059f1ec8-kube-api-access-h2prp\") pod \"barbican-db-create-kkx6d\" (UID: \"f13cffa1-660f-45b0-9207-39d7059f1ec8\") " pod="openstack/barbican-db-create-kkx6d" Dec 10 13:13:32 crc kubenswrapper[4921]: I1210 13:13:32.173232 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xxfjb\" (UniqueName: \"kubernetes.io/projected/544608c5-4a34-46c5-9f36-7bd4cf7c3eb3-kube-api-access-xxfjb\") pod \"neutron-db-create-74qkt\" (UID: \"544608c5-4a34-46c5-9f36-7bd4cf7c3eb3\") " pod="openstack/neutron-db-create-74qkt" Dec 10 13:13:32 crc kubenswrapper[4921]: I1210 13:13:32.173292 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/544608c5-4a34-46c5-9f36-7bd4cf7c3eb3-operator-scripts\") pod \"neutron-db-create-74qkt\" (UID: \"544608c5-4a34-46c5-9f36-7bd4cf7c3eb3\") " pod="openstack/neutron-db-create-74qkt" Dec 10 13:13:32 crc kubenswrapper[4921]: I1210 13:13:32.175725 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-khgmw\" (UniqueName: \"kubernetes.io/projected/9e1315d0-712b-4119-8569-304e3bcdf8e6-kube-api-access-khgmw\") pod \"cinder-526f-account-create-update-76g79\" (UID: \"9e1315d0-712b-4119-8569-304e3bcdf8e6\") " pod="openstack/cinder-526f-account-create-update-76g79" Dec 10 13:13:32 crc kubenswrapper[4921]: I1210 13:13:32.184613 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-5836-account-create-update-98fk7"] Dec 10 13:13:32 crc kubenswrapper[4921]: I1210 13:13:32.275884 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/90748fc1-2cb1-4e08-b531-8c835eaded23-operator-scripts\") pod \"neutron-5836-account-create-update-98fk7\" (UID: \"90748fc1-2cb1-4e08-b531-8c835eaded23\") " pod="openstack/neutron-5836-account-create-update-98fk7" Dec 10 13:13:32 crc kubenswrapper[4921]: I1210 13:13:32.276051 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fkjmk\" (UniqueName: \"kubernetes.io/projected/90748fc1-2cb1-4e08-b531-8c835eaded23-kube-api-access-fkjmk\") pod \"neutron-5836-account-create-update-98fk7\" (UID: \"90748fc1-2cb1-4e08-b531-8c835eaded23\") " pod="openstack/neutron-5836-account-create-update-98fk7" Dec 10 13:13:32 crc kubenswrapper[4921]: I1210 13:13:32.276078 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xxfjb\" (UniqueName: \"kubernetes.io/projected/544608c5-4a34-46c5-9f36-7bd4cf7c3eb3-kube-api-access-xxfjb\") pod \"neutron-db-create-74qkt\" (UID: \"544608c5-4a34-46c5-9f36-7bd4cf7c3eb3\") " pod="openstack/neutron-db-create-74qkt" Dec 10 13:13:32 crc kubenswrapper[4921]: I1210 13:13:32.276118 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/544608c5-4a34-46c5-9f36-7bd4cf7c3eb3-operator-scripts\") pod \"neutron-db-create-74qkt\" (UID: \"544608c5-4a34-46c5-9f36-7bd4cf7c3eb3\") " pod="openstack/neutron-db-create-74qkt" Dec 10 13:13:32 crc kubenswrapper[4921]: I1210 13:13:32.277981 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/544608c5-4a34-46c5-9f36-7bd4cf7c3eb3-operator-scripts\") pod \"neutron-db-create-74qkt\" (UID: \"544608c5-4a34-46c5-9f36-7bd4cf7c3eb3\") " pod="openstack/neutron-db-create-74qkt" Dec 10 13:13:32 crc kubenswrapper[4921]: I1210 13:13:32.302054 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xxfjb\" (UniqueName: \"kubernetes.io/projected/544608c5-4a34-46c5-9f36-7bd4cf7c3eb3-kube-api-access-xxfjb\") pod \"neutron-db-create-74qkt\" (UID: \"544608c5-4a34-46c5-9f36-7bd4cf7c3eb3\") " pod="openstack/neutron-db-create-74qkt" Dec 10 13:13:32 crc kubenswrapper[4921]: I1210 13:13:32.345441 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-4hrfk"] Dec 10 13:13:32 crc kubenswrapper[4921]: I1210 13:13:32.348497 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-4hrfk" Dec 10 13:13:32 crc kubenswrapper[4921]: I1210 13:13:32.367992 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 10 13:13:32 crc kubenswrapper[4921]: I1210 13:13:32.368172 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 10 13:13:32 crc kubenswrapper[4921]: I1210 13:13:32.368281 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 10 13:13:32 crc kubenswrapper[4921]: I1210 13:13:32.368546 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-t9xv5" Dec 10 13:13:32 crc kubenswrapper[4921]: I1210 13:13:32.375528 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-4hrfk"] Dec 10 13:13:32 crc kubenswrapper[4921]: I1210 13:13:32.379378 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/90748fc1-2cb1-4e08-b531-8c835eaded23-operator-scripts\") pod \"neutron-5836-account-create-update-98fk7\" (UID: \"90748fc1-2cb1-4e08-b531-8c835eaded23\") " pod="openstack/neutron-5836-account-create-update-98fk7" Dec 10 13:13:32 crc kubenswrapper[4921]: I1210 13:13:32.379477 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fkjmk\" (UniqueName: \"kubernetes.io/projected/90748fc1-2cb1-4e08-b531-8c835eaded23-kube-api-access-fkjmk\") pod \"neutron-5836-account-create-update-98fk7\" (UID: \"90748fc1-2cb1-4e08-b531-8c835eaded23\") " pod="openstack/neutron-5836-account-create-update-98fk7" Dec 10 13:13:32 crc kubenswrapper[4921]: I1210 13:13:32.382822 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/90748fc1-2cb1-4e08-b531-8c835eaded23-operator-scripts\") pod \"neutron-5836-account-create-update-98fk7\" (UID: \"90748fc1-2cb1-4e08-b531-8c835eaded23\") " pod="openstack/neutron-5836-account-create-update-98fk7" Dec 10 13:13:32 crc kubenswrapper[4921]: I1210 13:13:32.399065 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fkjmk\" (UniqueName: 
\"kubernetes.io/projected/90748fc1-2cb1-4e08-b531-8c835eaded23-kube-api-access-fkjmk\") pod \"neutron-5836-account-create-update-98fk7\" (UID: \"90748fc1-2cb1-4e08-b531-8c835eaded23\") " pod="openstack/neutron-5836-account-create-update-98fk7" Dec 10 13:13:32 crc kubenswrapper[4921]: I1210 13:13:32.447053 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-kkx6d" Dec 10 13:13:32 crc kubenswrapper[4921]: I1210 13:13:32.459454 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-526f-account-create-update-76g79" Dec 10 13:13:32 crc kubenswrapper[4921]: I1210 13:13:32.481838 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ebe4e94c-2099-452f-ac61-828e372f18a1-config-data\") pod \"keystone-db-sync-4hrfk\" (UID: \"ebe4e94c-2099-452f-ac61-828e372f18a1\") " pod="openstack/keystone-db-sync-4hrfk" Dec 10 13:13:32 crc kubenswrapper[4921]: I1210 13:13:32.482179 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ebe4e94c-2099-452f-ac61-828e372f18a1-combined-ca-bundle\") pod \"keystone-db-sync-4hrfk\" (UID: \"ebe4e94c-2099-452f-ac61-828e372f18a1\") " pod="openstack/keystone-db-sync-4hrfk" Dec 10 13:13:32 crc kubenswrapper[4921]: I1210 13:13:32.482232 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gj6pz\" (UniqueName: \"kubernetes.io/projected/ebe4e94c-2099-452f-ac61-828e372f18a1-kube-api-access-gj6pz\") pod \"keystone-db-sync-4hrfk\" (UID: \"ebe4e94c-2099-452f-ac61-828e372f18a1\") " pod="openstack/keystone-db-sync-4hrfk" Dec 10 13:13:32 crc kubenswrapper[4921]: I1210 13:13:32.579269 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-74qkt" Dec 10 13:13:32 crc kubenswrapper[4921]: I1210 13:13:32.583015 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ebe4e94c-2099-452f-ac61-828e372f18a1-combined-ca-bundle\") pod \"keystone-db-sync-4hrfk\" (UID: \"ebe4e94c-2099-452f-ac61-828e372f18a1\") " pod="openstack/keystone-db-sync-4hrfk" Dec 10 13:13:32 crc kubenswrapper[4921]: I1210 13:13:32.583052 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gj6pz\" (UniqueName: \"kubernetes.io/projected/ebe4e94c-2099-452f-ac61-828e372f18a1-kube-api-access-gj6pz\") pod \"keystone-db-sync-4hrfk\" (UID: \"ebe4e94c-2099-452f-ac61-828e372f18a1\") " pod="openstack/keystone-db-sync-4hrfk" Dec 10 13:13:32 crc kubenswrapper[4921]: I1210 13:13:32.583078 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ebe4e94c-2099-452f-ac61-828e372f18a1-config-data\") pod \"keystone-db-sync-4hrfk\" (UID: \"ebe4e94c-2099-452f-ac61-828e372f18a1\") " pod="openstack/keystone-db-sync-4hrfk" Dec 10 13:13:32 crc kubenswrapper[4921]: I1210 13:13:32.595239 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ebe4e94c-2099-452f-ac61-828e372f18a1-combined-ca-bundle\") pod \"keystone-db-sync-4hrfk\" (UID: \"ebe4e94c-2099-452f-ac61-828e372f18a1\") " pod="openstack/keystone-db-sync-4hrfk" Dec 10 13:13:32 crc kubenswrapper[4921]: I1210 13:13:32.595871 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ebe4e94c-2099-452f-ac61-828e372f18a1-config-data\") pod \"keystone-db-sync-4hrfk\" (UID: \"ebe4e94c-2099-452f-ac61-828e372f18a1\") " pod="openstack/keystone-db-sync-4hrfk" Dec 10 13:13:32 crc kubenswrapper[4921]: I1210 13:13:32.602820 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-5836-account-create-update-98fk7" Dec 10 13:13:32 crc kubenswrapper[4921]: I1210 13:13:32.620465 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gj6pz\" (UniqueName: \"kubernetes.io/projected/ebe4e94c-2099-452f-ac61-828e372f18a1-kube-api-access-gj6pz\") pod \"keystone-db-sync-4hrfk\" (UID: \"ebe4e94c-2099-452f-ac61-828e372f18a1\") " pod="openstack/keystone-db-sync-4hrfk" Dec 10 13:13:32 crc kubenswrapper[4921]: I1210 13:13:32.699715 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-4hrfk" Dec 10 13:13:32 crc kubenswrapper[4921]: I1210 13:13:32.842995 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-3ad1-account-create-update-mvnfv"] Dec 10 13:13:32 crc kubenswrapper[4921]: I1210 13:13:32.857960 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-mgjsp"] Dec 10 13:13:33 crc kubenswrapper[4921]: I1210 13:13:33.121053 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-kkx6d"] Dec 10 13:13:33 crc kubenswrapper[4921]: W1210 13:13:33.286760 4921 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9e1315d0_712b_4119_8569_304e3bcdf8e6.slice/crio-82d7d01a9f3d5490f782a56d59f71a45d2b89eb4542ca816e37af3c89003a388 WatchSource:0}: Error finding container 82d7d01a9f3d5490f782a56d59f71a45d2b89eb4542ca816e37af3c89003a388: Status 404 returned error can't find the container with id 82d7d01a9f3d5490f782a56d59f71a45d2b89eb4542ca816e37af3c89003a388 Dec 10 13:13:33 crc kubenswrapper[4921]: I1210 13:13:33.288819 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-mgjsp" event={"ID":"13330318-8c10-4f9c-9e04-1c4e6005d84e","Type":"ContainerStarted","Data":"88ed18e66536ddf14cea0069a11b923b48edffa834b362b73b4fdabf632e2835"} Dec 10 13:13:33 crc kubenswrapper[4921]: I1210 13:13:33.305464 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-526f-account-create-update-76g79"] Dec 10 13:13:33 crc kubenswrapper[4921]: I1210 13:13:33.313373 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-kkx6d" event={"ID":"f13cffa1-660f-45b0-9207-39d7059f1ec8","Type":"ContainerStarted","Data":"c4beefc6b57c805d0ff695e7f8312c74d8eb916acc6158ae8716a3028b5ad647"} Dec 10 13:13:33 crc kubenswrapper[4921]: I1210 13:13:33.320764 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-3ad1-account-create-update-mvnfv" event={"ID":"d834ef72-8c06-49b5-b966-8114d30fe9de","Type":"ContainerStarted","Data":"3c06c93b9df7d295fae09c78040cbf6ed17b4aff1aeca0cf946851fb061fdf8f"} Dec 10 13:13:33 crc kubenswrapper[4921]: I1210 13:13:33.345267 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-4hrfk"] Dec 10 13:13:33 crc kubenswrapper[4921]: I1210 13:13:33.420188 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-5836-account-create-update-98fk7"] Dec 10 13:13:33 crc kubenswrapper[4921]: I1210 13:13:33.450293 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-74qkt"] Dec 10 13:13:34 crc kubenswrapper[4921]: I1210 13:13:34.328663 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-74qkt" event={"ID":"544608c5-4a34-46c5-9f36-7bd4cf7c3eb3","Type":"ContainerStarted","Data":"eec60dd7636ad4df7ef7448b8acb9de26e94908fd8f33c38c1907ed438a5d547"} Dec 10 13:13:34 crc kubenswrapper[4921]: I1210 13:13:34.329124 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-74qkt" event={"ID":"544608c5-4a34-46c5-9f36-7bd4cf7c3eb3","Type":"ContainerStarted","Data":"5b0001b4c03f7daa5d6c5d9c214039378667d78631374a2428e8ff8f97c7462d"} Dec 10 13:13:34 crc kubenswrapper[4921]: I1210 13:13:34.333500 4921 generic.go:334] "Generic (PLEG): container finished" podID="13330318-8c10-4f9c-9e04-1c4e6005d84e" 
containerID="60c44768682bdd870ce40fe60e60103dcd9f25ccbd8d215f4f905f0449f6b6e9" exitCode=0 Dec 10 13:13:34 crc kubenswrapper[4921]: I1210 13:13:34.333539 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-mgjsp" event={"ID":"13330318-8c10-4f9c-9e04-1c4e6005d84e","Type":"ContainerDied","Data":"60c44768682bdd870ce40fe60e60103dcd9f25ccbd8d215f4f905f0449f6b6e9"} Dec 10 13:13:34 crc kubenswrapper[4921]: I1210 13:13:34.335410 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5836-account-create-update-98fk7" event={"ID":"90748fc1-2cb1-4e08-b531-8c835eaded23","Type":"ContainerStarted","Data":"ea141416d8a94280d9f965754fc53c9f3d2851ea26ac3998de3c201749934a2e"} Dec 10 13:13:34 crc kubenswrapper[4921]: I1210 13:13:34.335439 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5836-account-create-update-98fk7" event={"ID":"90748fc1-2cb1-4e08-b531-8c835eaded23","Type":"ContainerStarted","Data":"5419d420edb202fb034f57d803fe680654ed6edc21aec07e02ba80a8a43d9a68"} Dec 10 13:13:34 crc kubenswrapper[4921]: I1210 13:13:34.336653 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-4hrfk" event={"ID":"ebe4e94c-2099-452f-ac61-828e372f18a1","Type":"ContainerStarted","Data":"8e64577f4f29c381f4619484926e1c8ea92c64206bd51191c889078b8407049d"} Dec 10 13:13:34 crc kubenswrapper[4921]: I1210 13:13:34.338625 4921 generic.go:334] "Generic (PLEG): container finished" podID="f13cffa1-660f-45b0-9207-39d7059f1ec8" containerID="6414d2ef57e73c050f6fdf5fd98f79262cd0ba793231fe2ff9a4ed4fde465179" exitCode=0 Dec 10 13:13:34 crc kubenswrapper[4921]: I1210 13:13:34.338697 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-kkx6d" event={"ID":"f13cffa1-660f-45b0-9207-39d7059f1ec8","Type":"ContainerDied","Data":"6414d2ef57e73c050f6fdf5fd98f79262cd0ba793231fe2ff9a4ed4fde465179"} Dec 10 13:13:34 crc kubenswrapper[4921]: I1210 13:13:34.340421 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-526f-account-create-update-76g79" event={"ID":"9e1315d0-712b-4119-8569-304e3bcdf8e6","Type":"ContainerStarted","Data":"b8ce645bdc2eda66611a81c5f2da203a7481c58d530931f162a583b8bfa2837b"} Dec 10 13:13:34 crc kubenswrapper[4921]: I1210 13:13:34.340462 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-526f-account-create-update-76g79" event={"ID":"9e1315d0-712b-4119-8569-304e3bcdf8e6","Type":"ContainerStarted","Data":"82d7d01a9f3d5490f782a56d59f71a45d2b89eb4542ca816e37af3c89003a388"} Dec 10 13:13:34 crc kubenswrapper[4921]: I1210 13:13:34.347165 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-3ad1-account-create-update-mvnfv" event={"ID":"d834ef72-8c06-49b5-b966-8114d30fe9de","Type":"ContainerStarted","Data":"1452b30e66dc68cadd0df3afe5303618cd96b0d67003f01ee981cbb94ee9bca3"} Dec 10 13:13:34 crc kubenswrapper[4921]: I1210 13:13:34.351862 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-create-74qkt" podStartSLOduration=2.351842101 podStartE2EDuration="2.351842101s" podCreationTimestamp="2025-12-10 13:13:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 13:13:34.341939996 +0000 UTC m=+1011.558161920" watchObservedRunningTime="2025-12-10 13:13:34.351842101 +0000 UTC m=+1011.568064025" Dec 10 13:13:34 crc kubenswrapper[4921]: I1210 13:13:34.361209 4921 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-5836-account-create-update-98fk7" podStartSLOduration=2.361189842 podStartE2EDuration="2.361189842s" podCreationTimestamp="2025-12-10 13:13:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 13:13:34.356984239 +0000 UTC m=+1011.573206183" watchObservedRunningTime="2025-12-10 13:13:34.361189842 +0000 UTC m=+1011.577411766" Dec 10 13:13:34 crc kubenswrapper[4921]: I1210 13:13:34.376748 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-526f-account-create-update-76g79" podStartSLOduration=3.3767332789999998 podStartE2EDuration="3.376733279s" podCreationTimestamp="2025-12-10 13:13:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 13:13:34.373283727 +0000 UTC m=+1011.589505651" watchObservedRunningTime="2025-12-10 13:13:34.376733279 +0000 UTC m=+1011.592955203" Dec 10 13:13:34 crc kubenswrapper[4921]: I1210 13:13:34.429146 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-3ad1-account-create-update-mvnfv" podStartSLOduration=3.429118915 podStartE2EDuration="3.429118915s" podCreationTimestamp="2025-12-10 13:13:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 13:13:34.42297203 +0000 UTC m=+1011.639193944" watchObservedRunningTime="2025-12-10 13:13:34.429118915 +0000 UTC m=+1011.645340839" Dec 10 13:13:35 crc kubenswrapper[4921]: I1210 13:13:35.356363 4921 generic.go:334] "Generic (PLEG): container finished" podID="544608c5-4a34-46c5-9f36-7bd4cf7c3eb3" containerID="eec60dd7636ad4df7ef7448b8acb9de26e94908fd8f33c38c1907ed438a5d547" exitCode=0 Dec 10 13:13:35 crc kubenswrapper[4921]: I1210 13:13:35.356416 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-74qkt" event={"ID":"544608c5-4a34-46c5-9f36-7bd4cf7c3eb3","Type":"ContainerDied","Data":"eec60dd7636ad4df7ef7448b8acb9de26e94908fd8f33c38c1907ed438a5d547"} Dec 10 13:13:35 crc kubenswrapper[4921]: I1210 13:13:35.357809 4921 generic.go:334] "Generic (PLEG): container finished" podID="90748fc1-2cb1-4e08-b531-8c835eaded23" containerID="ea141416d8a94280d9f965754fc53c9f3d2851ea26ac3998de3c201749934a2e" exitCode=0 Dec 10 13:13:35 crc kubenswrapper[4921]: I1210 13:13:35.357853 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5836-account-create-update-98fk7" event={"ID":"90748fc1-2cb1-4e08-b531-8c835eaded23","Type":"ContainerDied","Data":"ea141416d8a94280d9f965754fc53c9f3d2851ea26ac3998de3c201749934a2e"} Dec 10 13:13:35 crc kubenswrapper[4921]: I1210 13:13:35.359185 4921 generic.go:334] "Generic (PLEG): container finished" podID="9e1315d0-712b-4119-8569-304e3bcdf8e6" containerID="b8ce645bdc2eda66611a81c5f2da203a7481c58d530931f162a583b8bfa2837b" exitCode=0 Dec 10 13:13:35 crc kubenswrapper[4921]: I1210 13:13:35.359235 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-526f-account-create-update-76g79" event={"ID":"9e1315d0-712b-4119-8569-304e3bcdf8e6","Type":"ContainerDied","Data":"b8ce645bdc2eda66611a81c5f2da203a7481c58d530931f162a583b8bfa2837b"} Dec 10 13:13:35 crc kubenswrapper[4921]: I1210 13:13:35.364691 4921 generic.go:334] "Generic (PLEG): container finished" 
podID="d834ef72-8c06-49b5-b966-8114d30fe9de" containerID="1452b30e66dc68cadd0df3afe5303618cd96b0d67003f01ee981cbb94ee9bca3" exitCode=0 Dec 10 13:13:35 crc kubenswrapper[4921]: I1210 13:13:35.364958 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-3ad1-account-create-update-mvnfv" event={"ID":"d834ef72-8c06-49b5-b966-8114d30fe9de","Type":"ContainerDied","Data":"1452b30e66dc68cadd0df3afe5303618cd96b0d67003f01ee981cbb94ee9bca3"} Dec 10 13:13:35 crc kubenswrapper[4921]: I1210 13:13:35.869134 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-mgjsp" Dec 10 13:13:35 crc kubenswrapper[4921]: I1210 13:13:35.877678 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-kkx6d" Dec 10 13:13:35 crc kubenswrapper[4921]: I1210 13:13:35.973912 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h2prp\" (UniqueName: \"kubernetes.io/projected/f13cffa1-660f-45b0-9207-39d7059f1ec8-kube-api-access-h2prp\") pod \"f13cffa1-660f-45b0-9207-39d7059f1ec8\" (UID: \"f13cffa1-660f-45b0-9207-39d7059f1ec8\") " Dec 10 13:13:35 crc kubenswrapper[4921]: I1210 13:13:35.974013 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2shdk\" (UniqueName: \"kubernetes.io/projected/13330318-8c10-4f9c-9e04-1c4e6005d84e-kube-api-access-2shdk\") pod \"13330318-8c10-4f9c-9e04-1c4e6005d84e\" (UID: \"13330318-8c10-4f9c-9e04-1c4e6005d84e\") " Dec 10 13:13:35 crc kubenswrapper[4921]: I1210 13:13:35.974036 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f13cffa1-660f-45b0-9207-39d7059f1ec8-operator-scripts\") pod \"f13cffa1-660f-45b0-9207-39d7059f1ec8\" (UID: \"f13cffa1-660f-45b0-9207-39d7059f1ec8\") " Dec 10 13:13:35 crc kubenswrapper[4921]: I1210 13:13:35.974123 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/13330318-8c10-4f9c-9e04-1c4e6005d84e-operator-scripts\") pod \"13330318-8c10-4f9c-9e04-1c4e6005d84e\" (UID: \"13330318-8c10-4f9c-9e04-1c4e6005d84e\") " Dec 10 13:13:35 crc kubenswrapper[4921]: I1210 13:13:35.975247 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/13330318-8c10-4f9c-9e04-1c4e6005d84e-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "13330318-8c10-4f9c-9e04-1c4e6005d84e" (UID: "13330318-8c10-4f9c-9e04-1c4e6005d84e"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 13:13:35 crc kubenswrapper[4921]: I1210 13:13:35.976239 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f13cffa1-660f-45b0-9207-39d7059f1ec8-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "f13cffa1-660f-45b0-9207-39d7059f1ec8" (UID: "f13cffa1-660f-45b0-9207-39d7059f1ec8"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 13:13:35 crc kubenswrapper[4921]: I1210 13:13:35.986718 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/13330318-8c10-4f9c-9e04-1c4e6005d84e-kube-api-access-2shdk" (OuterVolumeSpecName: "kube-api-access-2shdk") pod "13330318-8c10-4f9c-9e04-1c4e6005d84e" (UID: "13330318-8c10-4f9c-9e04-1c4e6005d84e"). 
InnerVolumeSpecName "kube-api-access-2shdk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 13:13:35 crc kubenswrapper[4921]: I1210 13:13:35.987142 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f13cffa1-660f-45b0-9207-39d7059f1ec8-kube-api-access-h2prp" (OuterVolumeSpecName: "kube-api-access-h2prp") pod "f13cffa1-660f-45b0-9207-39d7059f1ec8" (UID: "f13cffa1-660f-45b0-9207-39d7059f1ec8"). InnerVolumeSpecName "kube-api-access-h2prp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 13:13:36 crc kubenswrapper[4921]: I1210 13:13:36.075930 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2shdk\" (UniqueName: \"kubernetes.io/projected/13330318-8c10-4f9c-9e04-1c4e6005d84e-kube-api-access-2shdk\") on node \"crc\" DevicePath \"\"" Dec 10 13:13:36 crc kubenswrapper[4921]: I1210 13:13:36.075969 4921 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f13cffa1-660f-45b0-9207-39d7059f1ec8-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 13:13:36 crc kubenswrapper[4921]: I1210 13:13:36.075980 4921 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/13330318-8c10-4f9c-9e04-1c4e6005d84e-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 13:13:36 crc kubenswrapper[4921]: I1210 13:13:36.075991 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h2prp\" (UniqueName: \"kubernetes.io/projected/f13cffa1-660f-45b0-9207-39d7059f1ec8-kube-api-access-h2prp\") on node \"crc\" DevicePath \"\"" Dec 10 13:13:36 crc kubenswrapper[4921]: I1210 13:13:36.375827 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-mgjsp" event={"ID":"13330318-8c10-4f9c-9e04-1c4e6005d84e","Type":"ContainerDied","Data":"88ed18e66536ddf14cea0069a11b923b48edffa834b362b73b4fdabf632e2835"} Dec 10 13:13:36 crc kubenswrapper[4921]: I1210 13:13:36.375866 4921 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="88ed18e66536ddf14cea0069a11b923b48edffa834b362b73b4fdabf632e2835" Dec 10 13:13:36 crc kubenswrapper[4921]: I1210 13:13:36.375922 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-mgjsp" Dec 10 13:13:36 crc kubenswrapper[4921]: I1210 13:13:36.378633 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-kkx6d" event={"ID":"f13cffa1-660f-45b0-9207-39d7059f1ec8","Type":"ContainerDied","Data":"c4beefc6b57c805d0ff695e7f8312c74d8eb916acc6158ae8716a3028b5ad647"} Dec 10 13:13:36 crc kubenswrapper[4921]: I1210 13:13:36.378655 4921 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c4beefc6b57c805d0ff695e7f8312c74d8eb916acc6158ae8716a3028b5ad647" Dec 10 13:13:36 crc kubenswrapper[4921]: I1210 13:13:36.378826 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-kkx6d" Dec 10 13:13:39 crc kubenswrapper[4921]: I1210 13:13:39.256503 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-5836-account-create-update-98fk7" Dec 10 13:13:39 crc kubenswrapper[4921]: I1210 13:13:39.269882 4921 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-526f-account-create-update-76g79" Dec 10 13:13:39 crc kubenswrapper[4921]: I1210 13:13:39.286890 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-74qkt" Dec 10 13:13:39 crc kubenswrapper[4921]: I1210 13:13:39.294543 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-3ad1-account-create-update-mvnfv" Dec 10 13:13:39 crc kubenswrapper[4921]: I1210 13:13:39.335941 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fkjmk\" (UniqueName: \"kubernetes.io/projected/90748fc1-2cb1-4e08-b531-8c835eaded23-kube-api-access-fkjmk\") pod \"90748fc1-2cb1-4e08-b531-8c835eaded23\" (UID: \"90748fc1-2cb1-4e08-b531-8c835eaded23\") " Dec 10 13:13:39 crc kubenswrapper[4921]: I1210 13:13:39.336022 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xxfjb\" (UniqueName: \"kubernetes.io/projected/544608c5-4a34-46c5-9f36-7bd4cf7c3eb3-kube-api-access-xxfjb\") pod \"544608c5-4a34-46c5-9f36-7bd4cf7c3eb3\" (UID: \"544608c5-4a34-46c5-9f36-7bd4cf7c3eb3\") " Dec 10 13:13:39 crc kubenswrapper[4921]: I1210 13:13:39.336126 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/90748fc1-2cb1-4e08-b531-8c835eaded23-operator-scripts\") pod \"90748fc1-2cb1-4e08-b531-8c835eaded23\" (UID: \"90748fc1-2cb1-4e08-b531-8c835eaded23\") " Dec 10 13:13:39 crc kubenswrapper[4921]: I1210 13:13:39.336148 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9e1315d0-712b-4119-8569-304e3bcdf8e6-operator-scripts\") pod \"9e1315d0-712b-4119-8569-304e3bcdf8e6\" (UID: \"9e1315d0-712b-4119-8569-304e3bcdf8e6\") " Dec 10 13:13:39 crc kubenswrapper[4921]: I1210 13:13:39.336184 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-khgmw\" (UniqueName: \"kubernetes.io/projected/9e1315d0-712b-4119-8569-304e3bcdf8e6-kube-api-access-khgmw\") pod \"9e1315d0-712b-4119-8569-304e3bcdf8e6\" (UID: \"9e1315d0-712b-4119-8569-304e3bcdf8e6\") " Dec 10 13:13:39 crc kubenswrapper[4921]: I1210 13:13:39.336258 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/544608c5-4a34-46c5-9f36-7bd4cf7c3eb3-operator-scripts\") pod \"544608c5-4a34-46c5-9f36-7bd4cf7c3eb3\" (UID: \"544608c5-4a34-46c5-9f36-7bd4cf7c3eb3\") " Dec 10 13:13:39 crc kubenswrapper[4921]: I1210 13:13:39.336698 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9e1315d0-712b-4119-8569-304e3bcdf8e6-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "9e1315d0-712b-4119-8569-304e3bcdf8e6" (UID: "9e1315d0-712b-4119-8569-304e3bcdf8e6"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 13:13:39 crc kubenswrapper[4921]: I1210 13:13:39.337581 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/544608c5-4a34-46c5-9f36-7bd4cf7c3eb3-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "544608c5-4a34-46c5-9f36-7bd4cf7c3eb3" (UID: "544608c5-4a34-46c5-9f36-7bd4cf7c3eb3"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 13:13:39 crc kubenswrapper[4921]: I1210 13:13:39.337730 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/90748fc1-2cb1-4e08-b531-8c835eaded23-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "90748fc1-2cb1-4e08-b531-8c835eaded23" (UID: "90748fc1-2cb1-4e08-b531-8c835eaded23"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 13:13:39 crc kubenswrapper[4921]: I1210 13:13:39.342712 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/544608c5-4a34-46c5-9f36-7bd4cf7c3eb3-kube-api-access-xxfjb" (OuterVolumeSpecName: "kube-api-access-xxfjb") pod "544608c5-4a34-46c5-9f36-7bd4cf7c3eb3" (UID: "544608c5-4a34-46c5-9f36-7bd4cf7c3eb3"). InnerVolumeSpecName "kube-api-access-xxfjb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 13:13:39 crc kubenswrapper[4921]: I1210 13:13:39.363533 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9e1315d0-712b-4119-8569-304e3bcdf8e6-kube-api-access-khgmw" (OuterVolumeSpecName: "kube-api-access-khgmw") pod "9e1315d0-712b-4119-8569-304e3bcdf8e6" (UID: "9e1315d0-712b-4119-8569-304e3bcdf8e6"). InnerVolumeSpecName "kube-api-access-khgmw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 13:13:39 crc kubenswrapper[4921]: I1210 13:13:39.383135 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/90748fc1-2cb1-4e08-b531-8c835eaded23-kube-api-access-fkjmk" (OuterVolumeSpecName: "kube-api-access-fkjmk") pod "90748fc1-2cb1-4e08-b531-8c835eaded23" (UID: "90748fc1-2cb1-4e08-b531-8c835eaded23"). InnerVolumeSpecName "kube-api-access-fkjmk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 13:13:39 crc kubenswrapper[4921]: I1210 13:13:39.404420 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-526f-account-create-update-76g79" event={"ID":"9e1315d0-712b-4119-8569-304e3bcdf8e6","Type":"ContainerDied","Data":"82d7d01a9f3d5490f782a56d59f71a45d2b89eb4542ca816e37af3c89003a388"} Dec 10 13:13:39 crc kubenswrapper[4921]: I1210 13:13:39.404668 4921 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="82d7d01a9f3d5490f782a56d59f71a45d2b89eb4542ca816e37af3c89003a388" Dec 10 13:13:39 crc kubenswrapper[4921]: I1210 13:13:39.405482 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-3ad1-account-create-update-mvnfv" event={"ID":"d834ef72-8c06-49b5-b966-8114d30fe9de","Type":"ContainerDied","Data":"3c06c93b9df7d295fae09c78040cbf6ed17b4aff1aeca0cf946851fb061fdf8f"} Dec 10 13:13:39 crc kubenswrapper[4921]: I1210 13:13:39.405590 4921 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3c06c93b9df7d295fae09c78040cbf6ed17b4aff1aeca0cf946851fb061fdf8f" Dec 10 13:13:39 crc kubenswrapper[4921]: I1210 13:13:39.405538 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-3ad1-account-create-update-mvnfv" Dec 10 13:13:39 crc kubenswrapper[4921]: I1210 13:13:39.404429 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-526f-account-create-update-76g79" Dec 10 13:13:39 crc kubenswrapper[4921]: I1210 13:13:39.411580 4921 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-74qkt" Dec 10 13:13:39 crc kubenswrapper[4921]: I1210 13:13:39.411993 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-74qkt" event={"ID":"544608c5-4a34-46c5-9f36-7bd4cf7c3eb3","Type":"ContainerDied","Data":"5b0001b4c03f7daa5d6c5d9c214039378667d78631374a2428e8ff8f97c7462d"} Dec 10 13:13:39 crc kubenswrapper[4921]: I1210 13:13:39.412019 4921 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5b0001b4c03f7daa5d6c5d9c214039378667d78631374a2428e8ff8f97c7462d" Dec 10 13:13:39 crc kubenswrapper[4921]: I1210 13:13:39.424582 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5836-account-create-update-98fk7" event={"ID":"90748fc1-2cb1-4e08-b531-8c835eaded23","Type":"ContainerDied","Data":"5419d420edb202fb034f57d803fe680654ed6edc21aec07e02ba80a8a43d9a68"} Dec 10 13:13:39 crc kubenswrapper[4921]: I1210 13:13:39.424618 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-5836-account-create-update-98fk7" Dec 10 13:13:39 crc kubenswrapper[4921]: I1210 13:13:39.424616 4921 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5419d420edb202fb034f57d803fe680654ed6edc21aec07e02ba80a8a43d9a68" Dec 10 13:13:39 crc kubenswrapper[4921]: I1210 13:13:39.437856 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l5gnw\" (UniqueName: \"kubernetes.io/projected/d834ef72-8c06-49b5-b966-8114d30fe9de-kube-api-access-l5gnw\") pod \"d834ef72-8c06-49b5-b966-8114d30fe9de\" (UID: \"d834ef72-8c06-49b5-b966-8114d30fe9de\") " Dec 10 13:13:39 crc kubenswrapper[4921]: I1210 13:13:39.437941 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d834ef72-8c06-49b5-b966-8114d30fe9de-operator-scripts\") pod \"d834ef72-8c06-49b5-b966-8114d30fe9de\" (UID: \"d834ef72-8c06-49b5-b966-8114d30fe9de\") " Dec 10 13:13:39 crc kubenswrapper[4921]: I1210 13:13:39.438429 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d834ef72-8c06-49b5-b966-8114d30fe9de-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "d834ef72-8c06-49b5-b966-8114d30fe9de" (UID: "d834ef72-8c06-49b5-b966-8114d30fe9de"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 13:13:39 crc kubenswrapper[4921]: I1210 13:13:39.438536 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fkjmk\" (UniqueName: \"kubernetes.io/projected/90748fc1-2cb1-4e08-b531-8c835eaded23-kube-api-access-fkjmk\") on node \"crc\" DevicePath \"\"" Dec 10 13:13:39 crc kubenswrapper[4921]: I1210 13:13:39.438553 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xxfjb\" (UniqueName: \"kubernetes.io/projected/544608c5-4a34-46c5-9f36-7bd4cf7c3eb3-kube-api-access-xxfjb\") on node \"crc\" DevicePath \"\"" Dec 10 13:13:39 crc kubenswrapper[4921]: I1210 13:13:39.438562 4921 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/90748fc1-2cb1-4e08-b531-8c835eaded23-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 13:13:39 crc kubenswrapper[4921]: I1210 13:13:39.438592 4921 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9e1315d0-712b-4119-8569-304e3bcdf8e6-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 13:13:39 crc kubenswrapper[4921]: I1210 13:13:39.438602 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-khgmw\" (UniqueName: \"kubernetes.io/projected/9e1315d0-712b-4119-8569-304e3bcdf8e6-kube-api-access-khgmw\") on node \"crc\" DevicePath \"\"" Dec 10 13:13:39 crc kubenswrapper[4921]: I1210 13:13:39.438611 4921 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d834ef72-8c06-49b5-b966-8114d30fe9de-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 13:13:39 crc kubenswrapper[4921]: I1210 13:13:39.438619 4921 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/544608c5-4a34-46c5-9f36-7bd4cf7c3eb3-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 13:13:39 crc kubenswrapper[4921]: I1210 13:13:39.441651 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d834ef72-8c06-49b5-b966-8114d30fe9de-kube-api-access-l5gnw" (OuterVolumeSpecName: "kube-api-access-l5gnw") pod "d834ef72-8c06-49b5-b966-8114d30fe9de" (UID: "d834ef72-8c06-49b5-b966-8114d30fe9de"). InnerVolumeSpecName "kube-api-access-l5gnw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 13:13:39 crc kubenswrapper[4921]: I1210 13:13:39.540882 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l5gnw\" (UniqueName: \"kubernetes.io/projected/d834ef72-8c06-49b5-b966-8114d30fe9de-kube-api-access-l5gnw\") on node \"crc\" DevicePath \"\"" Dec 10 13:13:40 crc kubenswrapper[4921]: I1210 13:13:40.433947 4921 generic.go:334] "Generic (PLEG): container finished" podID="99af9def-7de6-4bab-98f9-890433c3836e" containerID="b4b4a895c1597e20c0953747b4ee7a22b38b75c00abb3664de38febf2b0c1542" exitCode=0 Dec 10 13:13:40 crc kubenswrapper[4921]: I1210 13:13:40.434026 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-zc8hg" event={"ID":"99af9def-7de6-4bab-98f9-890433c3836e","Type":"ContainerDied","Data":"b4b4a895c1597e20c0953747b4ee7a22b38b75c00abb3664de38febf2b0c1542"} Dec 10 13:13:40 crc kubenswrapper[4921]: I1210 13:13:40.436560 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-4hrfk" event={"ID":"ebe4e94c-2099-452f-ac61-828e372f18a1","Type":"ContainerStarted","Data":"0248f3657c74122a43bc2b90085029a7b455e9e0f0c3fdf0fbb3081d883cdfc1"} Dec 10 13:13:40 crc kubenswrapper[4921]: I1210 13:13:40.469629 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-4hrfk" podStartSLOduration=2.500195433 podStartE2EDuration="8.469609465s" podCreationTimestamp="2025-12-10 13:13:32 +0000 UTC" firstStartedPulling="2025-12-10 13:13:33.373274809 +0000 UTC m=+1010.589496723" lastFinishedPulling="2025-12-10 13:13:39.342688831 +0000 UTC m=+1016.558910755" observedRunningTime="2025-12-10 13:13:40.46607441 +0000 UTC m=+1017.682296334" watchObservedRunningTime="2025-12-10 13:13:40.469609465 +0000 UTC m=+1017.685831389" Dec 10 13:13:41 crc kubenswrapper[4921]: I1210 13:13:41.802753 4921 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-zc8hg" Dec 10 13:13:41 crc kubenswrapper[4921]: I1210 13:13:41.877822 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/99af9def-7de6-4bab-98f9-890433c3836e-db-sync-config-data\") pod \"99af9def-7de6-4bab-98f9-890433c3836e\" (UID: \"99af9def-7de6-4bab-98f9-890433c3836e\") " Dec 10 13:13:41 crc kubenswrapper[4921]: I1210 13:13:41.877926 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j2c8m\" (UniqueName: \"kubernetes.io/projected/99af9def-7de6-4bab-98f9-890433c3836e-kube-api-access-j2c8m\") pod \"99af9def-7de6-4bab-98f9-890433c3836e\" (UID: \"99af9def-7de6-4bab-98f9-890433c3836e\") " Dec 10 13:13:41 crc kubenswrapper[4921]: I1210 13:13:41.877957 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/99af9def-7de6-4bab-98f9-890433c3836e-combined-ca-bundle\") pod \"99af9def-7de6-4bab-98f9-890433c3836e\" (UID: \"99af9def-7de6-4bab-98f9-890433c3836e\") " Dec 10 13:13:41 crc kubenswrapper[4921]: I1210 13:13:41.877980 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/99af9def-7de6-4bab-98f9-890433c3836e-config-data\") pod \"99af9def-7de6-4bab-98f9-890433c3836e\" (UID: \"99af9def-7de6-4bab-98f9-890433c3836e\") " Dec 10 13:13:41 crc kubenswrapper[4921]: I1210 13:13:41.883657 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/99af9def-7de6-4bab-98f9-890433c3836e-kube-api-access-j2c8m" (OuterVolumeSpecName: "kube-api-access-j2c8m") pod "99af9def-7de6-4bab-98f9-890433c3836e" (UID: "99af9def-7de6-4bab-98f9-890433c3836e"). InnerVolumeSpecName "kube-api-access-j2c8m". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 13:13:41 crc kubenswrapper[4921]: I1210 13:13:41.894732 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/99af9def-7de6-4bab-98f9-890433c3836e-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "99af9def-7de6-4bab-98f9-890433c3836e" (UID: "99af9def-7de6-4bab-98f9-890433c3836e"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:13:41 crc kubenswrapper[4921]: I1210 13:13:41.905643 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/99af9def-7de6-4bab-98f9-890433c3836e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "99af9def-7de6-4bab-98f9-890433c3836e" (UID: "99af9def-7de6-4bab-98f9-890433c3836e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:13:41 crc kubenswrapper[4921]: I1210 13:13:41.916262 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/99af9def-7de6-4bab-98f9-890433c3836e-config-data" (OuterVolumeSpecName: "config-data") pod "99af9def-7de6-4bab-98f9-890433c3836e" (UID: "99af9def-7de6-4bab-98f9-890433c3836e"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:13:41 crc kubenswrapper[4921]: I1210 13:13:41.979942 4921 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/99af9def-7de6-4bab-98f9-890433c3836e-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 10 13:13:41 crc kubenswrapper[4921]: I1210 13:13:41.980221 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j2c8m\" (UniqueName: \"kubernetes.io/projected/99af9def-7de6-4bab-98f9-890433c3836e-kube-api-access-j2c8m\") on node \"crc\" DevicePath \"\"" Dec 10 13:13:41 crc kubenswrapper[4921]: I1210 13:13:41.980327 4921 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/99af9def-7de6-4bab-98f9-890433c3836e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 13:13:41 crc kubenswrapper[4921]: I1210 13:13:41.980458 4921 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/99af9def-7de6-4bab-98f9-890433c3836e-config-data\") on node \"crc\" DevicePath \"\"" Dec 10 13:13:42 crc kubenswrapper[4921]: I1210 13:13:42.452803 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-zc8hg" event={"ID":"99af9def-7de6-4bab-98f9-890433c3836e","Type":"ContainerDied","Data":"51af92cc1ee02978900c55bd1a1b09a7453c2a2f4a3bd5317dec9b50e71697fa"} Dec 10 13:13:42 crc kubenswrapper[4921]: I1210 13:13:42.452842 4921 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="51af92cc1ee02978900c55bd1a1b09a7453c2a2f4a3bd5317dec9b50e71697fa" Dec 10 13:13:42 crc kubenswrapper[4921]: I1210 13:13:42.452850 4921 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-zc8hg" Dec 10 13:13:42 crc kubenswrapper[4921]: I1210 13:13:42.791024 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-554567b4f7-w2ncj"] Dec 10 13:13:42 crc kubenswrapper[4921]: E1210 13:13:42.791381 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f13cffa1-660f-45b0-9207-39d7059f1ec8" containerName="mariadb-database-create" Dec 10 13:13:42 crc kubenswrapper[4921]: I1210 13:13:42.791410 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="f13cffa1-660f-45b0-9207-39d7059f1ec8" containerName="mariadb-database-create" Dec 10 13:13:42 crc kubenswrapper[4921]: E1210 13:13:42.791430 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="13330318-8c10-4f9c-9e04-1c4e6005d84e" containerName="mariadb-database-create" Dec 10 13:13:42 crc kubenswrapper[4921]: I1210 13:13:42.791436 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="13330318-8c10-4f9c-9e04-1c4e6005d84e" containerName="mariadb-database-create" Dec 10 13:13:42 crc kubenswrapper[4921]: E1210 13:13:42.791448 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="99af9def-7de6-4bab-98f9-890433c3836e" containerName="glance-db-sync" Dec 10 13:13:42 crc kubenswrapper[4921]: I1210 13:13:42.791455 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="99af9def-7de6-4bab-98f9-890433c3836e" containerName="glance-db-sync" Dec 10 13:13:42 crc kubenswrapper[4921]: E1210 13:13:42.791473 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="90748fc1-2cb1-4e08-b531-8c835eaded23" containerName="mariadb-account-create-update" Dec 10 13:13:42 crc kubenswrapper[4921]: I1210 13:13:42.791479 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="90748fc1-2cb1-4e08-b531-8c835eaded23" containerName="mariadb-account-create-update" Dec 10 13:13:42 crc kubenswrapper[4921]: E1210 13:13:42.791486 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d834ef72-8c06-49b5-b966-8114d30fe9de" containerName="mariadb-account-create-update" Dec 10 13:13:42 crc kubenswrapper[4921]: I1210 13:13:42.791492 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="d834ef72-8c06-49b5-b966-8114d30fe9de" containerName="mariadb-account-create-update" Dec 10 13:13:42 crc kubenswrapper[4921]: E1210 13:13:42.791504 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="544608c5-4a34-46c5-9f36-7bd4cf7c3eb3" containerName="mariadb-database-create" Dec 10 13:13:42 crc kubenswrapper[4921]: I1210 13:13:42.791509 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="544608c5-4a34-46c5-9f36-7bd4cf7c3eb3" containerName="mariadb-database-create" Dec 10 13:13:42 crc kubenswrapper[4921]: E1210 13:13:42.791517 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9e1315d0-712b-4119-8569-304e3bcdf8e6" containerName="mariadb-account-create-update" Dec 10 13:13:42 crc kubenswrapper[4921]: I1210 13:13:42.791523 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="9e1315d0-712b-4119-8569-304e3bcdf8e6" containerName="mariadb-account-create-update" Dec 10 13:13:42 crc kubenswrapper[4921]: I1210 13:13:42.791812 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="90748fc1-2cb1-4e08-b531-8c835eaded23" containerName="mariadb-account-create-update" Dec 10 13:13:42 crc kubenswrapper[4921]: I1210 13:13:42.791823 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="9e1315d0-712b-4119-8569-304e3bcdf8e6" 
containerName="mariadb-account-create-update" Dec 10 13:13:42 crc kubenswrapper[4921]: I1210 13:13:42.791836 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="99af9def-7de6-4bab-98f9-890433c3836e" containerName="glance-db-sync" Dec 10 13:13:42 crc kubenswrapper[4921]: I1210 13:13:42.791843 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="13330318-8c10-4f9c-9e04-1c4e6005d84e" containerName="mariadb-database-create" Dec 10 13:13:42 crc kubenswrapper[4921]: I1210 13:13:42.791854 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="d834ef72-8c06-49b5-b966-8114d30fe9de" containerName="mariadb-account-create-update" Dec 10 13:13:42 crc kubenswrapper[4921]: I1210 13:13:42.791869 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="f13cffa1-660f-45b0-9207-39d7059f1ec8" containerName="mariadb-database-create" Dec 10 13:13:42 crc kubenswrapper[4921]: I1210 13:13:42.791889 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="544608c5-4a34-46c5-9f36-7bd4cf7c3eb3" containerName="mariadb-database-create" Dec 10 13:13:42 crc kubenswrapper[4921]: I1210 13:13:42.792848 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-554567b4f7-w2ncj" Dec 10 13:13:42 crc kubenswrapper[4921]: I1210 13:13:42.802340 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-554567b4f7-w2ncj"] Dec 10 13:13:42 crc kubenswrapper[4921]: I1210 13:13:42.892669 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5ff8326d-54a8-4fc4-8fe9-beabdf3889ac-config\") pod \"dnsmasq-dns-554567b4f7-w2ncj\" (UID: \"5ff8326d-54a8-4fc4-8fe9-beabdf3889ac\") " pod="openstack/dnsmasq-dns-554567b4f7-w2ncj" Dec 10 13:13:42 crc kubenswrapper[4921]: I1210 13:13:42.892742 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5ff8326d-54a8-4fc4-8fe9-beabdf3889ac-ovsdbserver-sb\") pod \"dnsmasq-dns-554567b4f7-w2ncj\" (UID: \"5ff8326d-54a8-4fc4-8fe9-beabdf3889ac\") " pod="openstack/dnsmasq-dns-554567b4f7-w2ncj" Dec 10 13:13:42 crc kubenswrapper[4921]: I1210 13:13:42.892995 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5ff8326d-54a8-4fc4-8fe9-beabdf3889ac-ovsdbserver-nb\") pod \"dnsmasq-dns-554567b4f7-w2ncj\" (UID: \"5ff8326d-54a8-4fc4-8fe9-beabdf3889ac\") " pod="openstack/dnsmasq-dns-554567b4f7-w2ncj" Dec 10 13:13:42 crc kubenswrapper[4921]: I1210 13:13:42.893158 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rxtb5\" (UniqueName: \"kubernetes.io/projected/5ff8326d-54a8-4fc4-8fe9-beabdf3889ac-kube-api-access-rxtb5\") pod \"dnsmasq-dns-554567b4f7-w2ncj\" (UID: \"5ff8326d-54a8-4fc4-8fe9-beabdf3889ac\") " pod="openstack/dnsmasq-dns-554567b4f7-w2ncj" Dec 10 13:13:42 crc kubenswrapper[4921]: I1210 13:13:42.893267 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5ff8326d-54a8-4fc4-8fe9-beabdf3889ac-dns-svc\") pod \"dnsmasq-dns-554567b4f7-w2ncj\" (UID: \"5ff8326d-54a8-4fc4-8fe9-beabdf3889ac\") " pod="openstack/dnsmasq-dns-554567b4f7-w2ncj" Dec 10 13:13:42 crc kubenswrapper[4921]: I1210 13:13:42.994338 4921 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5ff8326d-54a8-4fc4-8fe9-beabdf3889ac-ovsdbserver-nb\") pod \"dnsmasq-dns-554567b4f7-w2ncj\" (UID: \"5ff8326d-54a8-4fc4-8fe9-beabdf3889ac\") " pod="openstack/dnsmasq-dns-554567b4f7-w2ncj" Dec 10 13:13:42 crc kubenswrapper[4921]: I1210 13:13:42.994470 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rxtb5\" (UniqueName: \"kubernetes.io/projected/5ff8326d-54a8-4fc4-8fe9-beabdf3889ac-kube-api-access-rxtb5\") pod \"dnsmasq-dns-554567b4f7-w2ncj\" (UID: \"5ff8326d-54a8-4fc4-8fe9-beabdf3889ac\") " pod="openstack/dnsmasq-dns-554567b4f7-w2ncj" Dec 10 13:13:42 crc kubenswrapper[4921]: I1210 13:13:42.994509 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5ff8326d-54a8-4fc4-8fe9-beabdf3889ac-dns-svc\") pod \"dnsmasq-dns-554567b4f7-w2ncj\" (UID: \"5ff8326d-54a8-4fc4-8fe9-beabdf3889ac\") " pod="openstack/dnsmasq-dns-554567b4f7-w2ncj" Dec 10 13:13:42 crc kubenswrapper[4921]: I1210 13:13:42.994533 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5ff8326d-54a8-4fc4-8fe9-beabdf3889ac-config\") pod \"dnsmasq-dns-554567b4f7-w2ncj\" (UID: \"5ff8326d-54a8-4fc4-8fe9-beabdf3889ac\") " pod="openstack/dnsmasq-dns-554567b4f7-w2ncj" Dec 10 13:13:42 crc kubenswrapper[4921]: I1210 13:13:42.994580 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5ff8326d-54a8-4fc4-8fe9-beabdf3889ac-ovsdbserver-sb\") pod \"dnsmasq-dns-554567b4f7-w2ncj\" (UID: \"5ff8326d-54a8-4fc4-8fe9-beabdf3889ac\") " pod="openstack/dnsmasq-dns-554567b4f7-w2ncj" Dec 10 13:13:42 crc kubenswrapper[4921]: I1210 13:13:42.995351 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5ff8326d-54a8-4fc4-8fe9-beabdf3889ac-ovsdbserver-nb\") pod \"dnsmasq-dns-554567b4f7-w2ncj\" (UID: \"5ff8326d-54a8-4fc4-8fe9-beabdf3889ac\") " pod="openstack/dnsmasq-dns-554567b4f7-w2ncj" Dec 10 13:13:42 crc kubenswrapper[4921]: I1210 13:13:42.995371 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5ff8326d-54a8-4fc4-8fe9-beabdf3889ac-ovsdbserver-sb\") pod \"dnsmasq-dns-554567b4f7-w2ncj\" (UID: \"5ff8326d-54a8-4fc4-8fe9-beabdf3889ac\") " pod="openstack/dnsmasq-dns-554567b4f7-w2ncj" Dec 10 13:13:42 crc kubenswrapper[4921]: I1210 13:13:42.995955 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5ff8326d-54a8-4fc4-8fe9-beabdf3889ac-dns-svc\") pod \"dnsmasq-dns-554567b4f7-w2ncj\" (UID: \"5ff8326d-54a8-4fc4-8fe9-beabdf3889ac\") " pod="openstack/dnsmasq-dns-554567b4f7-w2ncj" Dec 10 13:13:42 crc kubenswrapper[4921]: I1210 13:13:42.996464 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5ff8326d-54a8-4fc4-8fe9-beabdf3889ac-config\") pod \"dnsmasq-dns-554567b4f7-w2ncj\" (UID: \"5ff8326d-54a8-4fc4-8fe9-beabdf3889ac\") " pod="openstack/dnsmasq-dns-554567b4f7-w2ncj" Dec 10 13:13:43 crc kubenswrapper[4921]: I1210 13:13:43.016367 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rxtb5\" (UniqueName: 
\"kubernetes.io/projected/5ff8326d-54a8-4fc4-8fe9-beabdf3889ac-kube-api-access-rxtb5\") pod \"dnsmasq-dns-554567b4f7-w2ncj\" (UID: \"5ff8326d-54a8-4fc4-8fe9-beabdf3889ac\") " pod="openstack/dnsmasq-dns-554567b4f7-w2ncj" Dec 10 13:13:43 crc kubenswrapper[4921]: I1210 13:13:43.106577 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-554567b4f7-w2ncj" Dec 10 13:13:43 crc kubenswrapper[4921]: I1210 13:13:43.462062 4921 generic.go:334] "Generic (PLEG): container finished" podID="ebe4e94c-2099-452f-ac61-828e372f18a1" containerID="0248f3657c74122a43bc2b90085029a7b455e9e0f0c3fdf0fbb3081d883cdfc1" exitCode=0 Dec 10 13:13:43 crc kubenswrapper[4921]: I1210 13:13:43.462101 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-4hrfk" event={"ID":"ebe4e94c-2099-452f-ac61-828e372f18a1","Type":"ContainerDied","Data":"0248f3657c74122a43bc2b90085029a7b455e9e0f0c3fdf0fbb3081d883cdfc1"} Dec 10 13:13:43 crc kubenswrapper[4921]: W1210 13:13:43.564286 4921 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5ff8326d_54a8_4fc4_8fe9_beabdf3889ac.slice/crio-5fa2e69b44e5a50be4c49f9e378876f10e94f0d9b3afcc773f878054eb0bc0be WatchSource:0}: Error finding container 5fa2e69b44e5a50be4c49f9e378876f10e94f0d9b3afcc773f878054eb0bc0be: Status 404 returned error can't find the container with id 5fa2e69b44e5a50be4c49f9e378876f10e94f0d9b3afcc773f878054eb0bc0be Dec 10 13:13:43 crc kubenswrapper[4921]: I1210 13:13:43.575766 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-554567b4f7-w2ncj"] Dec 10 13:13:44 crc kubenswrapper[4921]: I1210 13:13:44.471659 4921 generic.go:334] "Generic (PLEG): container finished" podID="5ff8326d-54a8-4fc4-8fe9-beabdf3889ac" containerID="74d2c612a18d55e3efa8d2af28d7ec26ebe857bbde17031b25784b3a7599de6e" exitCode=0 Dec 10 13:13:44 crc kubenswrapper[4921]: I1210 13:13:44.471713 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-554567b4f7-w2ncj" event={"ID":"5ff8326d-54a8-4fc4-8fe9-beabdf3889ac","Type":"ContainerDied","Data":"74d2c612a18d55e3efa8d2af28d7ec26ebe857bbde17031b25784b3a7599de6e"} Dec 10 13:13:44 crc kubenswrapper[4921]: I1210 13:13:44.472148 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-554567b4f7-w2ncj" event={"ID":"5ff8326d-54a8-4fc4-8fe9-beabdf3889ac","Type":"ContainerStarted","Data":"5fa2e69b44e5a50be4c49f9e378876f10e94f0d9b3afcc773f878054eb0bc0be"} Dec 10 13:13:44 crc kubenswrapper[4921]: I1210 13:13:44.792598 4921 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-4hrfk" Dec 10 13:13:44 crc kubenswrapper[4921]: I1210 13:13:44.925597 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gj6pz\" (UniqueName: \"kubernetes.io/projected/ebe4e94c-2099-452f-ac61-828e372f18a1-kube-api-access-gj6pz\") pod \"ebe4e94c-2099-452f-ac61-828e372f18a1\" (UID: \"ebe4e94c-2099-452f-ac61-828e372f18a1\") " Dec 10 13:13:44 crc kubenswrapper[4921]: I1210 13:13:44.925687 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ebe4e94c-2099-452f-ac61-828e372f18a1-combined-ca-bundle\") pod \"ebe4e94c-2099-452f-ac61-828e372f18a1\" (UID: \"ebe4e94c-2099-452f-ac61-828e372f18a1\") " Dec 10 13:13:44 crc kubenswrapper[4921]: I1210 13:13:44.925773 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ebe4e94c-2099-452f-ac61-828e372f18a1-config-data\") pod \"ebe4e94c-2099-452f-ac61-828e372f18a1\" (UID: \"ebe4e94c-2099-452f-ac61-828e372f18a1\") " Dec 10 13:13:44 crc kubenswrapper[4921]: I1210 13:13:44.930872 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ebe4e94c-2099-452f-ac61-828e372f18a1-kube-api-access-gj6pz" (OuterVolumeSpecName: "kube-api-access-gj6pz") pod "ebe4e94c-2099-452f-ac61-828e372f18a1" (UID: "ebe4e94c-2099-452f-ac61-828e372f18a1"). InnerVolumeSpecName "kube-api-access-gj6pz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 13:13:44 crc kubenswrapper[4921]: I1210 13:13:44.953824 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ebe4e94c-2099-452f-ac61-828e372f18a1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ebe4e94c-2099-452f-ac61-828e372f18a1" (UID: "ebe4e94c-2099-452f-ac61-828e372f18a1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:13:44 crc kubenswrapper[4921]: I1210 13:13:44.973102 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ebe4e94c-2099-452f-ac61-828e372f18a1-config-data" (OuterVolumeSpecName: "config-data") pod "ebe4e94c-2099-452f-ac61-828e372f18a1" (UID: "ebe4e94c-2099-452f-ac61-828e372f18a1"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:13:45 crc kubenswrapper[4921]: I1210 13:13:45.028504 4921 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ebe4e94c-2099-452f-ac61-828e372f18a1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 13:13:45 crc kubenswrapper[4921]: I1210 13:13:45.028541 4921 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ebe4e94c-2099-452f-ac61-828e372f18a1-config-data\") on node \"crc\" DevicePath \"\"" Dec 10 13:13:45 crc kubenswrapper[4921]: I1210 13:13:45.028553 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gj6pz\" (UniqueName: \"kubernetes.io/projected/ebe4e94c-2099-452f-ac61-828e372f18a1-kube-api-access-gj6pz\") on node \"crc\" DevicePath \"\"" Dec 10 13:13:45 crc kubenswrapper[4921]: I1210 13:13:45.480206 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-4hrfk" event={"ID":"ebe4e94c-2099-452f-ac61-828e372f18a1","Type":"ContainerDied","Data":"8e64577f4f29c381f4619484926e1c8ea92c64206bd51191c889078b8407049d"} Dec 10 13:13:45 crc kubenswrapper[4921]: I1210 13:13:45.481185 4921 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8e64577f4f29c381f4619484926e1c8ea92c64206bd51191c889078b8407049d" Dec 10 13:13:45 crc kubenswrapper[4921]: I1210 13:13:45.480259 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-4hrfk" Dec 10 13:13:45 crc kubenswrapper[4921]: I1210 13:13:45.481939 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-554567b4f7-w2ncj" event={"ID":"5ff8326d-54a8-4fc4-8fe9-beabdf3889ac","Type":"ContainerStarted","Data":"74a8cd7b0564cd34d90dab357e4956eafe4b9a59ae7cf56d516850c65c8c6713"} Dec 10 13:13:45 crc kubenswrapper[4921]: I1210 13:13:45.482261 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-554567b4f7-w2ncj" Dec 10 13:13:45 crc kubenswrapper[4921]: I1210 13:13:45.512517 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-554567b4f7-w2ncj" podStartSLOduration=3.512496493 podStartE2EDuration="3.512496493s" podCreationTimestamp="2025-12-10 13:13:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 13:13:45.504680593 +0000 UTC m=+1022.720902527" watchObservedRunningTime="2025-12-10 13:13:45.512496493 +0000 UTC m=+1022.728718427" Dec 10 13:13:45 crc kubenswrapper[4921]: I1210 13:13:45.712727 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-554567b4f7-w2ncj"] Dec 10 13:13:45 crc kubenswrapper[4921]: I1210 13:13:45.754283 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-67795cd9-cfp8j"] Dec 10 13:13:45 crc kubenswrapper[4921]: E1210 13:13:45.754878 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ebe4e94c-2099-452f-ac61-828e372f18a1" containerName="keystone-db-sync" Dec 10 13:13:45 crc kubenswrapper[4921]: I1210 13:13:45.754948 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="ebe4e94c-2099-452f-ac61-828e372f18a1" containerName="keystone-db-sync" Dec 10 13:13:45 crc kubenswrapper[4921]: I1210 13:13:45.755176 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="ebe4e94c-2099-452f-ac61-828e372f18a1" 
containerName="keystone-db-sync" Dec 10 13:13:45 crc kubenswrapper[4921]: I1210 13:13:45.767801 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-67795cd9-cfp8j" Dec 10 13:13:45 crc kubenswrapper[4921]: I1210 13:13:45.801819 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-67795cd9-cfp8j"] Dec 10 13:13:45 crc kubenswrapper[4921]: I1210 13:13:45.819434 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-9xffq"] Dec 10 13:13:45 crc kubenswrapper[4921]: I1210 13:13:45.820714 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-9xffq" Dec 10 13:13:45 crc kubenswrapper[4921]: I1210 13:13:45.863492 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 10 13:13:45 crc kubenswrapper[4921]: I1210 13:13:45.865855 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 10 13:13:45 crc kubenswrapper[4921]: I1210 13:13:45.865859 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-t9xv5" Dec 10 13:13:45 crc kubenswrapper[4921]: I1210 13:13:45.866541 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Dec 10 13:13:45 crc kubenswrapper[4921]: I1210 13:13:45.866944 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 10 13:13:45 crc kubenswrapper[4921]: I1210 13:13:45.885511 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-9xffq"] Dec 10 13:13:45 crc kubenswrapper[4921]: I1210 13:13:45.945787 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9c4c6c6d-89dd-4427-95f7-ad81cd61004b-config\") pod \"dnsmasq-dns-67795cd9-cfp8j\" (UID: \"9c4c6c6d-89dd-4427-95f7-ad81cd61004b\") " pod="openstack/dnsmasq-dns-67795cd9-cfp8j" Dec 10 13:13:45 crc kubenswrapper[4921]: I1210 13:13:45.945850 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kt5rt\" (UniqueName: \"kubernetes.io/projected/9c4c6c6d-89dd-4427-95f7-ad81cd61004b-kube-api-access-kt5rt\") pod \"dnsmasq-dns-67795cd9-cfp8j\" (UID: \"9c4c6c6d-89dd-4427-95f7-ad81cd61004b\") " pod="openstack/dnsmasq-dns-67795cd9-cfp8j" Dec 10 13:13:45 crc kubenswrapper[4921]: I1210 13:13:45.945885 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9c4c6c6d-89dd-4427-95f7-ad81cd61004b-dns-svc\") pod \"dnsmasq-dns-67795cd9-cfp8j\" (UID: \"9c4c6c6d-89dd-4427-95f7-ad81cd61004b\") " pod="openstack/dnsmasq-dns-67795cd9-cfp8j" Dec 10 13:13:45 crc kubenswrapper[4921]: I1210 13:13:45.945906 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/377b810d-b236-4836-b11e-ab2dffd082cc-config-data\") pod \"keystone-bootstrap-9xffq\" (UID: \"377b810d-b236-4836-b11e-ab2dffd082cc\") " pod="openstack/keystone-bootstrap-9xffq" Dec 10 13:13:45 crc kubenswrapper[4921]: I1210 13:13:45.945972 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/377b810d-b236-4836-b11e-ab2dffd082cc-fernet-keys\") pod 
\"keystone-bootstrap-9xffq\" (UID: \"377b810d-b236-4836-b11e-ab2dffd082cc\") " pod="openstack/keystone-bootstrap-9xffq" Dec 10 13:13:45 crc kubenswrapper[4921]: I1210 13:13:45.945994 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8xrfn\" (UniqueName: \"kubernetes.io/projected/377b810d-b236-4836-b11e-ab2dffd082cc-kube-api-access-8xrfn\") pod \"keystone-bootstrap-9xffq\" (UID: \"377b810d-b236-4836-b11e-ab2dffd082cc\") " pod="openstack/keystone-bootstrap-9xffq" Dec 10 13:13:45 crc kubenswrapper[4921]: I1210 13:13:45.946021 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9c4c6c6d-89dd-4427-95f7-ad81cd61004b-ovsdbserver-sb\") pod \"dnsmasq-dns-67795cd9-cfp8j\" (UID: \"9c4c6c6d-89dd-4427-95f7-ad81cd61004b\") " pod="openstack/dnsmasq-dns-67795cd9-cfp8j" Dec 10 13:13:45 crc kubenswrapper[4921]: I1210 13:13:45.946051 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/377b810d-b236-4836-b11e-ab2dffd082cc-combined-ca-bundle\") pod \"keystone-bootstrap-9xffq\" (UID: \"377b810d-b236-4836-b11e-ab2dffd082cc\") " pod="openstack/keystone-bootstrap-9xffq" Dec 10 13:13:45 crc kubenswrapper[4921]: I1210 13:13:45.946090 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/377b810d-b236-4836-b11e-ab2dffd082cc-credential-keys\") pod \"keystone-bootstrap-9xffq\" (UID: \"377b810d-b236-4836-b11e-ab2dffd082cc\") " pod="openstack/keystone-bootstrap-9xffq" Dec 10 13:13:45 crc kubenswrapper[4921]: I1210 13:13:45.946114 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/377b810d-b236-4836-b11e-ab2dffd082cc-scripts\") pod \"keystone-bootstrap-9xffq\" (UID: \"377b810d-b236-4836-b11e-ab2dffd082cc\") " pod="openstack/keystone-bootstrap-9xffq" Dec 10 13:13:45 crc kubenswrapper[4921]: I1210 13:13:45.946141 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9c4c6c6d-89dd-4427-95f7-ad81cd61004b-ovsdbserver-nb\") pod \"dnsmasq-dns-67795cd9-cfp8j\" (UID: \"9c4c6c6d-89dd-4427-95f7-ad81cd61004b\") " pod="openstack/dnsmasq-dns-67795cd9-cfp8j" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.046925 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kt5rt\" (UniqueName: \"kubernetes.io/projected/9c4c6c6d-89dd-4427-95f7-ad81cd61004b-kube-api-access-kt5rt\") pod \"dnsmasq-dns-67795cd9-cfp8j\" (UID: \"9c4c6c6d-89dd-4427-95f7-ad81cd61004b\") " pod="openstack/dnsmasq-dns-67795cd9-cfp8j" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.047280 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9c4c6c6d-89dd-4427-95f7-ad81cd61004b-dns-svc\") pod \"dnsmasq-dns-67795cd9-cfp8j\" (UID: \"9c4c6c6d-89dd-4427-95f7-ad81cd61004b\") " pod="openstack/dnsmasq-dns-67795cd9-cfp8j" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.047422 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/377b810d-b236-4836-b11e-ab2dffd082cc-config-data\") pod 
\"keystone-bootstrap-9xffq\" (UID: \"377b810d-b236-4836-b11e-ab2dffd082cc\") " pod="openstack/keystone-bootstrap-9xffq" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.047577 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/377b810d-b236-4836-b11e-ab2dffd082cc-fernet-keys\") pod \"keystone-bootstrap-9xffq\" (UID: \"377b810d-b236-4836-b11e-ab2dffd082cc\") " pod="openstack/keystone-bootstrap-9xffq" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.047682 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8xrfn\" (UniqueName: \"kubernetes.io/projected/377b810d-b236-4836-b11e-ab2dffd082cc-kube-api-access-8xrfn\") pod \"keystone-bootstrap-9xffq\" (UID: \"377b810d-b236-4836-b11e-ab2dffd082cc\") " pod="openstack/keystone-bootstrap-9xffq" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.047798 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9c4c6c6d-89dd-4427-95f7-ad81cd61004b-ovsdbserver-sb\") pod \"dnsmasq-dns-67795cd9-cfp8j\" (UID: \"9c4c6c6d-89dd-4427-95f7-ad81cd61004b\") " pod="openstack/dnsmasq-dns-67795cd9-cfp8j" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.047910 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/377b810d-b236-4836-b11e-ab2dffd082cc-combined-ca-bundle\") pod \"keystone-bootstrap-9xffq\" (UID: \"377b810d-b236-4836-b11e-ab2dffd082cc\") " pod="openstack/keystone-bootstrap-9xffq" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.048036 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/377b810d-b236-4836-b11e-ab2dffd082cc-credential-keys\") pod \"keystone-bootstrap-9xffq\" (UID: \"377b810d-b236-4836-b11e-ab2dffd082cc\") " pod="openstack/keystone-bootstrap-9xffq" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.048153 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/377b810d-b236-4836-b11e-ab2dffd082cc-scripts\") pod \"keystone-bootstrap-9xffq\" (UID: \"377b810d-b236-4836-b11e-ab2dffd082cc\") " pod="openstack/keystone-bootstrap-9xffq" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.048272 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9c4c6c6d-89dd-4427-95f7-ad81cd61004b-ovsdbserver-nb\") pod \"dnsmasq-dns-67795cd9-cfp8j\" (UID: \"9c4c6c6d-89dd-4427-95f7-ad81cd61004b\") " pod="openstack/dnsmasq-dns-67795cd9-cfp8j" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.048420 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9c4c6c6d-89dd-4427-95f7-ad81cd61004b-config\") pod \"dnsmasq-dns-67795cd9-cfp8j\" (UID: \"9c4c6c6d-89dd-4427-95f7-ad81cd61004b\") " pod="openstack/dnsmasq-dns-67795cd9-cfp8j" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.048072 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9c4c6c6d-89dd-4427-95f7-ad81cd61004b-dns-svc\") pod \"dnsmasq-dns-67795cd9-cfp8j\" (UID: \"9c4c6c6d-89dd-4427-95f7-ad81cd61004b\") " pod="openstack/dnsmasq-dns-67795cd9-cfp8j" Dec 10 13:13:46 crc 
kubenswrapper[4921]: I1210 13:13:46.048868 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9c4c6c6d-89dd-4427-95f7-ad81cd61004b-ovsdbserver-sb\") pod \"dnsmasq-dns-67795cd9-cfp8j\" (UID: \"9c4c6c6d-89dd-4427-95f7-ad81cd61004b\") " pod="openstack/dnsmasq-dns-67795cd9-cfp8j" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.049460 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9c4c6c6d-89dd-4427-95f7-ad81cd61004b-config\") pod \"dnsmasq-dns-67795cd9-cfp8j\" (UID: \"9c4c6c6d-89dd-4427-95f7-ad81cd61004b\") " pod="openstack/dnsmasq-dns-67795cd9-cfp8j" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.053725 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/377b810d-b236-4836-b11e-ab2dffd082cc-config-data\") pod \"keystone-bootstrap-9xffq\" (UID: \"377b810d-b236-4836-b11e-ab2dffd082cc\") " pod="openstack/keystone-bootstrap-9xffq" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.056523 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9c4c6c6d-89dd-4427-95f7-ad81cd61004b-ovsdbserver-nb\") pod \"dnsmasq-dns-67795cd9-cfp8j\" (UID: \"9c4c6c6d-89dd-4427-95f7-ad81cd61004b\") " pod="openstack/dnsmasq-dns-67795cd9-cfp8j" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.076096 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/377b810d-b236-4836-b11e-ab2dffd082cc-fernet-keys\") pod \"keystone-bootstrap-9xffq\" (UID: \"377b810d-b236-4836-b11e-ab2dffd082cc\") " pod="openstack/keystone-bootstrap-9xffq" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.080218 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/377b810d-b236-4836-b11e-ab2dffd082cc-combined-ca-bundle\") pod \"keystone-bootstrap-9xffq\" (UID: \"377b810d-b236-4836-b11e-ab2dffd082cc\") " pod="openstack/keystone-bootstrap-9xffq" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.082837 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/377b810d-b236-4836-b11e-ab2dffd082cc-credential-keys\") pod \"keystone-bootstrap-9xffq\" (UID: \"377b810d-b236-4836-b11e-ab2dffd082cc\") " pod="openstack/keystone-bootstrap-9xffq" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.083061 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/377b810d-b236-4836-b11e-ab2dffd082cc-scripts\") pod \"keystone-bootstrap-9xffq\" (UID: \"377b810d-b236-4836-b11e-ab2dffd082cc\") " pod="openstack/keystone-bootstrap-9xffq" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.138366 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kt5rt\" (UniqueName: \"kubernetes.io/projected/9c4c6c6d-89dd-4427-95f7-ad81cd61004b-kube-api-access-kt5rt\") pod \"dnsmasq-dns-67795cd9-cfp8j\" (UID: \"9c4c6c6d-89dd-4427-95f7-ad81cd61004b\") " pod="openstack/dnsmasq-dns-67795cd9-cfp8j" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.157479 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8xrfn\" (UniqueName: 
\"kubernetes.io/projected/377b810d-b236-4836-b11e-ab2dffd082cc-kube-api-access-8xrfn\") pod \"keystone-bootstrap-9xffq\" (UID: \"377b810d-b236-4836-b11e-ab2dffd082cc\") " pod="openstack/keystone-bootstrap-9xffq" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.217532 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-nqvc7"] Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.219102 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-nqvc7" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.223841 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-bqds8" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.226628 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.226921 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.252451 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-nqvc7"] Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.266168 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-fm45n"] Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.273183 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-fm45n" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.285180 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.291376 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-kqhts" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.291910 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.355127 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-fm45n"] Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.361606 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/049815fe-e8f8-45c6-9360-d2d331fa8cd3-scripts\") pod \"cinder-db-sync-nqvc7\" (UID: \"049815fe-e8f8-45c6-9360-d2d331fa8cd3\") " pod="openstack/cinder-db-sync-nqvc7" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.361712 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/049815fe-e8f8-45c6-9360-d2d331fa8cd3-etc-machine-id\") pod \"cinder-db-sync-nqvc7\" (UID: \"049815fe-e8f8-45c6-9360-d2d331fa8cd3\") " pod="openstack/cinder-db-sync-nqvc7" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.361750 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/049815fe-e8f8-45c6-9360-d2d331fa8cd3-config-data\") pod \"cinder-db-sync-nqvc7\" (UID: \"049815fe-e8f8-45c6-9360-d2d331fa8cd3\") " pod="openstack/cinder-db-sync-nqvc7" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.361824 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: 
\"kubernetes.io/secret/049815fe-e8f8-45c6-9360-d2d331fa8cd3-db-sync-config-data\") pod \"cinder-db-sync-nqvc7\" (UID: \"049815fe-e8f8-45c6-9360-d2d331fa8cd3\") " pod="openstack/cinder-db-sync-nqvc7" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.361856 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-swfrk\" (UniqueName: \"kubernetes.io/projected/049815fe-e8f8-45c6-9360-d2d331fa8cd3-kube-api-access-swfrk\") pod \"cinder-db-sync-nqvc7\" (UID: \"049815fe-e8f8-45c6-9360-d2d331fa8cd3\") " pod="openstack/cinder-db-sync-nqvc7" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.361876 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/049815fe-e8f8-45c6-9360-d2d331fa8cd3-combined-ca-bundle\") pod \"cinder-db-sync-nqvc7\" (UID: \"049815fe-e8f8-45c6-9360-d2d331fa8cd3\") " pod="openstack/cinder-db-sync-nqvc7" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.391876 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-67795cd9-cfp8j" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.438412 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-w9cf8"] Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.440155 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-w9cf8" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.445840 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-9xffq" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.455183 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-rcjt2" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.455652 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.455981 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.466218 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-w9cf8"] Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.468093 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/049815fe-e8f8-45c6-9360-d2d331fa8cd3-etc-machine-id\") pod \"cinder-db-sync-nqvc7\" (UID: \"049815fe-e8f8-45c6-9360-d2d331fa8cd3\") " pod="openstack/cinder-db-sync-nqvc7" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.468134 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cd9bf4e6-e6d1-4df4-ab06-5b7bc6f88473-config-data\") pod \"placement-db-sync-fm45n\" (UID: \"cd9bf4e6-e6d1-4df4-ab06-5b7bc6f88473\") " pod="openstack/placement-db-sync-fm45n" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.468157 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j6d5n\" (UniqueName: \"kubernetes.io/projected/f1e88d7b-1b52-4f84-9648-61b3fc78a4f5-kube-api-access-j6d5n\") pod \"neutron-db-sync-w9cf8\" (UID: \"f1e88d7b-1b52-4f84-9648-61b3fc78a4f5\") " pod="openstack/neutron-db-sync-w9cf8" Dec 10 13:13:46 crc 
kubenswrapper[4921]: I1210 13:13:46.468178 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/049815fe-e8f8-45c6-9360-d2d331fa8cd3-config-data\") pod \"cinder-db-sync-nqvc7\" (UID: \"049815fe-e8f8-45c6-9360-d2d331fa8cd3\") " pod="openstack/cinder-db-sync-nqvc7" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.468193 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd9bf4e6-e6d1-4df4-ab06-5b7bc6f88473-combined-ca-bundle\") pod \"placement-db-sync-fm45n\" (UID: \"cd9bf4e6-e6d1-4df4-ab06-5b7bc6f88473\") " pod="openstack/placement-db-sync-fm45n" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.468241 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/049815fe-e8f8-45c6-9360-d2d331fa8cd3-db-sync-config-data\") pod \"cinder-db-sync-nqvc7\" (UID: \"049815fe-e8f8-45c6-9360-d2d331fa8cd3\") " pod="openstack/cinder-db-sync-nqvc7" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.468271 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-swfrk\" (UniqueName: \"kubernetes.io/projected/049815fe-e8f8-45c6-9360-d2d331fa8cd3-kube-api-access-swfrk\") pod \"cinder-db-sync-nqvc7\" (UID: \"049815fe-e8f8-45c6-9360-d2d331fa8cd3\") " pod="openstack/cinder-db-sync-nqvc7" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.468287 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/049815fe-e8f8-45c6-9360-d2d331fa8cd3-combined-ca-bundle\") pod \"cinder-db-sync-nqvc7\" (UID: \"049815fe-e8f8-45c6-9360-d2d331fa8cd3\") " pod="openstack/cinder-db-sync-nqvc7" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.468312 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f1e88d7b-1b52-4f84-9648-61b3fc78a4f5-combined-ca-bundle\") pod \"neutron-db-sync-w9cf8\" (UID: \"f1e88d7b-1b52-4f84-9648-61b3fc78a4f5\") " pod="openstack/neutron-db-sync-w9cf8" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.468330 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qvntt\" (UniqueName: \"kubernetes.io/projected/cd9bf4e6-e6d1-4df4-ab06-5b7bc6f88473-kube-api-access-qvntt\") pod \"placement-db-sync-fm45n\" (UID: \"cd9bf4e6-e6d1-4df4-ab06-5b7bc6f88473\") " pod="openstack/placement-db-sync-fm45n" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.468345 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cd9bf4e6-e6d1-4df4-ab06-5b7bc6f88473-logs\") pod \"placement-db-sync-fm45n\" (UID: \"cd9bf4e6-e6d1-4df4-ab06-5b7bc6f88473\") " pod="openstack/placement-db-sync-fm45n" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.471529 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/049815fe-e8f8-45c6-9360-d2d331fa8cd3-scripts\") pod \"cinder-db-sync-nqvc7\" (UID: \"049815fe-e8f8-45c6-9360-d2d331fa8cd3\") " pod="openstack/cinder-db-sync-nqvc7" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.471607 4921 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/f1e88d7b-1b52-4f84-9648-61b3fc78a4f5-config\") pod \"neutron-db-sync-w9cf8\" (UID: \"f1e88d7b-1b52-4f84-9648-61b3fc78a4f5\") " pod="openstack/neutron-db-sync-w9cf8" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.471631 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cd9bf4e6-e6d1-4df4-ab06-5b7bc6f88473-scripts\") pod \"placement-db-sync-fm45n\" (UID: \"cd9bf4e6-e6d1-4df4-ab06-5b7bc6f88473\") " pod="openstack/placement-db-sync-fm45n" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.469522 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/049815fe-e8f8-45c6-9360-d2d331fa8cd3-etc-machine-id\") pod \"cinder-db-sync-nqvc7\" (UID: \"049815fe-e8f8-45c6-9360-d2d331fa8cd3\") " pod="openstack/cinder-db-sync-nqvc7" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.501533 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.532140 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/049815fe-e8f8-45c6-9360-d2d331fa8cd3-combined-ca-bundle\") pod \"cinder-db-sync-nqvc7\" (UID: \"049815fe-e8f8-45c6-9360-d2d331fa8cd3\") " pod="openstack/cinder-db-sync-nqvc7" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.533533 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.535196 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/049815fe-e8f8-45c6-9360-d2d331fa8cd3-config-data\") pod \"cinder-db-sync-nqvc7\" (UID: \"049815fe-e8f8-45c6-9360-d2d331fa8cd3\") " pod="openstack/cinder-db-sync-nqvc7" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.540155 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-67795cd9-cfp8j"] Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.546322 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.546743 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.546899 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-swfrk\" (UniqueName: \"kubernetes.io/projected/049815fe-e8f8-45c6-9360-d2d331fa8cd3-kube-api-access-swfrk\") pod \"cinder-db-sync-nqvc7\" (UID: \"049815fe-e8f8-45c6-9360-d2d331fa8cd3\") " pod="openstack/cinder-db-sync-nqvc7" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.595464 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/049815fe-e8f8-45c6-9360-d2d331fa8cd3-db-sync-config-data\") pod \"cinder-db-sync-nqvc7\" (UID: \"049815fe-e8f8-45c6-9360-d2d331fa8cd3\") " pod="openstack/cinder-db-sync-nqvc7" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.608901 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/f1e88d7b-1b52-4f84-9648-61b3fc78a4f5-config\") pod 
\"neutron-db-sync-w9cf8\" (UID: \"f1e88d7b-1b52-4f84-9648-61b3fc78a4f5\") " pod="openstack/neutron-db-sync-w9cf8" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.608937 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cd9bf4e6-e6d1-4df4-ab06-5b7bc6f88473-scripts\") pod \"placement-db-sync-fm45n\" (UID: \"cd9bf4e6-e6d1-4df4-ab06-5b7bc6f88473\") " pod="openstack/placement-db-sync-fm45n" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.608974 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bc458bae-c665-435a-ae77-3b7cb34146bb-scripts\") pod \"ceilometer-0\" (UID: \"bc458bae-c665-435a-ae77-3b7cb34146bb\") " pod="openstack/ceilometer-0" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.609041 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bc458bae-c665-435a-ae77-3b7cb34146bb-log-httpd\") pod \"ceilometer-0\" (UID: \"bc458bae-c665-435a-ae77-3b7cb34146bb\") " pod="openstack/ceilometer-0" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.609130 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bc458bae-c665-435a-ae77-3b7cb34146bb-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"bc458bae-c665-435a-ae77-3b7cb34146bb\") " pod="openstack/ceilometer-0" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.609176 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cd9bf4e6-e6d1-4df4-ab06-5b7bc6f88473-config-data\") pod \"placement-db-sync-fm45n\" (UID: \"cd9bf4e6-e6d1-4df4-ab06-5b7bc6f88473\") " pod="openstack/placement-db-sync-fm45n" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.609200 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j6d5n\" (UniqueName: \"kubernetes.io/projected/f1e88d7b-1b52-4f84-9648-61b3fc78a4f5-kube-api-access-j6d5n\") pod \"neutron-db-sync-w9cf8\" (UID: \"f1e88d7b-1b52-4f84-9648-61b3fc78a4f5\") " pod="openstack/neutron-db-sync-w9cf8" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.609230 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bc458bae-c665-435a-ae77-3b7cb34146bb-run-httpd\") pod \"ceilometer-0\" (UID: \"bc458bae-c665-435a-ae77-3b7cb34146bb\") " pod="openstack/ceilometer-0" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.609260 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd9bf4e6-e6d1-4df4-ab06-5b7bc6f88473-combined-ca-bundle\") pod \"placement-db-sync-fm45n\" (UID: \"cd9bf4e6-e6d1-4df4-ab06-5b7bc6f88473\") " pod="openstack/placement-db-sync-fm45n" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.609378 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bc458bae-c665-435a-ae77-3b7cb34146bb-config-data\") pod \"ceilometer-0\" (UID: \"bc458bae-c665-435a-ae77-3b7cb34146bb\") " pod="openstack/ceilometer-0" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.609433 4921 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8jrch\" (UniqueName: \"kubernetes.io/projected/bc458bae-c665-435a-ae77-3b7cb34146bb-kube-api-access-8jrch\") pod \"ceilometer-0\" (UID: \"bc458bae-c665-435a-ae77-3b7cb34146bb\") " pod="openstack/ceilometer-0" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.610209 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/049815fe-e8f8-45c6-9360-d2d331fa8cd3-scripts\") pod \"cinder-db-sync-nqvc7\" (UID: \"049815fe-e8f8-45c6-9360-d2d331fa8cd3\") " pod="openstack/cinder-db-sync-nqvc7" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.624677 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f1e88d7b-1b52-4f84-9648-61b3fc78a4f5-combined-ca-bundle\") pod \"neutron-db-sync-w9cf8\" (UID: \"f1e88d7b-1b52-4f84-9648-61b3fc78a4f5\") " pod="openstack/neutron-db-sync-w9cf8" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.624735 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/bc458bae-c665-435a-ae77-3b7cb34146bb-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"bc458bae-c665-435a-ae77-3b7cb34146bb\") " pod="openstack/ceilometer-0" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.624790 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qvntt\" (UniqueName: \"kubernetes.io/projected/cd9bf4e6-e6d1-4df4-ab06-5b7bc6f88473-kube-api-access-qvntt\") pod \"placement-db-sync-fm45n\" (UID: \"cd9bf4e6-e6d1-4df4-ab06-5b7bc6f88473\") " pod="openstack/placement-db-sync-fm45n" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.624816 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cd9bf4e6-e6d1-4df4-ab06-5b7bc6f88473-logs\") pod \"placement-db-sync-fm45n\" (UID: \"cd9bf4e6-e6d1-4df4-ab06-5b7bc6f88473\") " pod="openstack/placement-db-sync-fm45n" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.656051 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.656937 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd9bf4e6-e6d1-4df4-ab06-5b7bc6f88473-combined-ca-bundle\") pod \"placement-db-sync-fm45n\" (UID: \"cd9bf4e6-e6d1-4df4-ab06-5b7bc6f88473\") " pod="openstack/placement-db-sync-fm45n" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.657469 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cd9bf4e6-e6d1-4df4-ab06-5b7bc6f88473-logs\") pod \"placement-db-sync-fm45n\" (UID: \"cd9bf4e6-e6d1-4df4-ab06-5b7bc6f88473\") " pod="openstack/placement-db-sync-fm45n" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.664423 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cd9bf4e6-e6d1-4df4-ab06-5b7bc6f88473-config-data\") pod \"placement-db-sync-fm45n\" (UID: \"cd9bf4e6-e6d1-4df4-ab06-5b7bc6f88473\") " pod="openstack/placement-db-sync-fm45n" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.664925 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: 
\"kubernetes.io/secret/f1e88d7b-1b52-4f84-9648-61b3fc78a4f5-config\") pod \"neutron-db-sync-w9cf8\" (UID: \"f1e88d7b-1b52-4f84-9648-61b3fc78a4f5\") " pod="openstack/neutron-db-sync-w9cf8" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.665201 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f1e88d7b-1b52-4f84-9648-61b3fc78a4f5-combined-ca-bundle\") pod \"neutron-db-sync-w9cf8\" (UID: \"f1e88d7b-1b52-4f84-9648-61b3fc78a4f5\") " pod="openstack/neutron-db-sync-w9cf8" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.669846 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cd9bf4e6-e6d1-4df4-ab06-5b7bc6f88473-scripts\") pod \"placement-db-sync-fm45n\" (UID: \"cd9bf4e6-e6d1-4df4-ab06-5b7bc6f88473\") " pod="openstack/placement-db-sync-fm45n" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.716015 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qvntt\" (UniqueName: \"kubernetes.io/projected/cd9bf4e6-e6d1-4df4-ab06-5b7bc6f88473-kube-api-access-qvntt\") pod \"placement-db-sync-fm45n\" (UID: \"cd9bf4e6-e6d1-4df4-ab06-5b7bc6f88473\") " pod="openstack/placement-db-sync-fm45n" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.717837 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j6d5n\" (UniqueName: \"kubernetes.io/projected/f1e88d7b-1b52-4f84-9648-61b3fc78a4f5-kube-api-access-j6d5n\") pod \"neutron-db-sync-w9cf8\" (UID: \"f1e88d7b-1b52-4f84-9648-61b3fc78a4f5\") " pod="openstack/neutron-db-sync-w9cf8" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.741110 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5b6dbdb6f5-5858x"] Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.742843 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5b6dbdb6f5-5858x" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.758605 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bc458bae-c665-435a-ae77-3b7cb34146bb-scripts\") pod \"ceilometer-0\" (UID: \"bc458bae-c665-435a-ae77-3b7cb34146bb\") " pod="openstack/ceilometer-0" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.758649 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bc458bae-c665-435a-ae77-3b7cb34146bb-log-httpd\") pod \"ceilometer-0\" (UID: \"bc458bae-c665-435a-ae77-3b7cb34146bb\") " pod="openstack/ceilometer-0" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.758690 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bc458bae-c665-435a-ae77-3b7cb34146bb-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"bc458bae-c665-435a-ae77-3b7cb34146bb\") " pod="openstack/ceilometer-0" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.758716 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bc458bae-c665-435a-ae77-3b7cb34146bb-run-httpd\") pod \"ceilometer-0\" (UID: \"bc458bae-c665-435a-ae77-3b7cb34146bb\") " pod="openstack/ceilometer-0" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.758751 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bc458bae-c665-435a-ae77-3b7cb34146bb-config-data\") pod \"ceilometer-0\" (UID: \"bc458bae-c665-435a-ae77-3b7cb34146bb\") " pod="openstack/ceilometer-0" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.758771 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8jrch\" (UniqueName: \"kubernetes.io/projected/bc458bae-c665-435a-ae77-3b7cb34146bb-kube-api-access-8jrch\") pod \"ceilometer-0\" (UID: \"bc458bae-c665-435a-ae77-3b7cb34146bb\") " pod="openstack/ceilometer-0" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.758830 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/bc458bae-c665-435a-ae77-3b7cb34146bb-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"bc458bae-c665-435a-ae77-3b7cb34146bb\") " pod="openstack/ceilometer-0" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.766058 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/bc458bae-c665-435a-ae77-3b7cb34146bb-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"bc458bae-c665-435a-ae77-3b7cb34146bb\") " pod="openstack/ceilometer-0" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.769279 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bc458bae-c665-435a-ae77-3b7cb34146bb-log-httpd\") pod \"ceilometer-0\" (UID: \"bc458bae-c665-435a-ae77-3b7cb34146bb\") " pod="openstack/ceilometer-0" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.769512 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bc458bae-c665-435a-ae77-3b7cb34146bb-run-httpd\") pod \"ceilometer-0\" (UID: \"bc458bae-c665-435a-ae77-3b7cb34146bb\") " 
pod="openstack/ceilometer-0" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.777010 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bc458bae-c665-435a-ae77-3b7cb34146bb-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"bc458bae-c665-435a-ae77-3b7cb34146bb\") " pod="openstack/ceilometer-0" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.783027 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5b6dbdb6f5-5858x"] Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.794141 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bc458bae-c665-435a-ae77-3b7cb34146bb-config-data\") pod \"ceilometer-0\" (UID: \"bc458bae-c665-435a-ae77-3b7cb34146bb\") " pod="openstack/ceilometer-0" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.806762 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bc458bae-c665-435a-ae77-3b7cb34146bb-scripts\") pod \"ceilometer-0\" (UID: \"bc458bae-c665-435a-ae77-3b7cb34146bb\") " pod="openstack/ceilometer-0" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.809999 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8jrch\" (UniqueName: \"kubernetes.io/projected/bc458bae-c665-435a-ae77-3b7cb34146bb-kube-api-access-8jrch\") pod \"ceilometer-0\" (UID: \"bc458bae-c665-435a-ae77-3b7cb34146bb\") " pod="openstack/ceilometer-0" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.840464 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-mlljr"] Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.842114 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-mlljr" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.848882 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-cbw5c" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.848882 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.860628 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ad9192c4-9b58-4292-a701-c6cb4de2c679-ovsdbserver-sb\") pod \"dnsmasq-dns-5b6dbdb6f5-5858x\" (UID: \"ad9192c4-9b58-4292-a701-c6cb4de2c679\") " pod="openstack/dnsmasq-dns-5b6dbdb6f5-5858x" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.860663 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ad9192c4-9b58-4292-a701-c6cb4de2c679-ovsdbserver-nb\") pod \"dnsmasq-dns-5b6dbdb6f5-5858x\" (UID: \"ad9192c4-9b58-4292-a701-c6cb4de2c679\") " pod="openstack/dnsmasq-dns-5b6dbdb6f5-5858x" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.860747 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ad9192c4-9b58-4292-a701-c6cb4de2c679-config\") pod \"dnsmasq-dns-5b6dbdb6f5-5858x\" (UID: \"ad9192c4-9b58-4292-a701-c6cb4de2c679\") " pod="openstack/dnsmasq-dns-5b6dbdb6f5-5858x" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.860837 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ad9192c4-9b58-4292-a701-c6cb4de2c679-dns-svc\") pod \"dnsmasq-dns-5b6dbdb6f5-5858x\" (UID: \"ad9192c4-9b58-4292-a701-c6cb4de2c679\") " pod="openstack/dnsmasq-dns-5b6dbdb6f5-5858x" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.860863 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mtf2s\" (UniqueName: \"kubernetes.io/projected/ad9192c4-9b58-4292-a701-c6cb4de2c679-kube-api-access-mtf2s\") pod \"dnsmasq-dns-5b6dbdb6f5-5858x\" (UID: \"ad9192c4-9b58-4292-a701-c6cb4de2c679\") " pod="openstack/dnsmasq-dns-5b6dbdb6f5-5858x" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.863571 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-mlljr"] Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.885275 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-nqvc7" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.887071 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-w9cf8" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.918371 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.933328 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-fm45n" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.962670 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ad9192c4-9b58-4292-a701-c6cb4de2c679-dns-svc\") pod \"dnsmasq-dns-5b6dbdb6f5-5858x\" (UID: \"ad9192c4-9b58-4292-a701-c6cb4de2c679\") " pod="openstack/dnsmasq-dns-5b6dbdb6f5-5858x" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.962737 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zxkbg\" (UniqueName: \"kubernetes.io/projected/7efd750c-824b-443b-a0e3-dc57a14e928c-kube-api-access-zxkbg\") pod \"barbican-db-sync-mlljr\" (UID: \"7efd750c-824b-443b-a0e3-dc57a14e928c\") " pod="openstack/barbican-db-sync-mlljr" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.963750 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ad9192c4-9b58-4292-a701-c6cb4de2c679-dns-svc\") pod \"dnsmasq-dns-5b6dbdb6f5-5858x\" (UID: \"ad9192c4-9b58-4292-a701-c6cb4de2c679\") " pod="openstack/dnsmasq-dns-5b6dbdb6f5-5858x" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.963789 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mtf2s\" (UniqueName: \"kubernetes.io/projected/ad9192c4-9b58-4292-a701-c6cb4de2c679-kube-api-access-mtf2s\") pod \"dnsmasq-dns-5b6dbdb6f5-5858x\" (UID: \"ad9192c4-9b58-4292-a701-c6cb4de2c679\") " pod="openstack/dnsmasq-dns-5b6dbdb6f5-5858x" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.963814 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/7efd750c-824b-443b-a0e3-dc57a14e928c-db-sync-config-data\") pod \"barbican-db-sync-mlljr\" (UID: \"7efd750c-824b-443b-a0e3-dc57a14e928c\") " pod="openstack/barbican-db-sync-mlljr" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.963901 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ad9192c4-9b58-4292-a701-c6cb4de2c679-ovsdbserver-sb\") pod \"dnsmasq-dns-5b6dbdb6f5-5858x\" (UID: \"ad9192c4-9b58-4292-a701-c6cb4de2c679\") " pod="openstack/dnsmasq-dns-5b6dbdb6f5-5858x" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.963926 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ad9192c4-9b58-4292-a701-c6cb4de2c679-ovsdbserver-nb\") pod \"dnsmasq-dns-5b6dbdb6f5-5858x\" (UID: \"ad9192c4-9b58-4292-a701-c6cb4de2c679\") " pod="openstack/dnsmasq-dns-5b6dbdb6f5-5858x" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.963991 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ad9192c4-9b58-4292-a701-c6cb4de2c679-config\") pod \"dnsmasq-dns-5b6dbdb6f5-5858x\" (UID: \"ad9192c4-9b58-4292-a701-c6cb4de2c679\") " pod="openstack/dnsmasq-dns-5b6dbdb6f5-5858x" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.964028 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7efd750c-824b-443b-a0e3-dc57a14e928c-combined-ca-bundle\") pod \"barbican-db-sync-mlljr\" (UID: \"7efd750c-824b-443b-a0e3-dc57a14e928c\") " 
pod="openstack/barbican-db-sync-mlljr" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.965761 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ad9192c4-9b58-4292-a701-c6cb4de2c679-ovsdbserver-sb\") pod \"dnsmasq-dns-5b6dbdb6f5-5858x\" (UID: \"ad9192c4-9b58-4292-a701-c6cb4de2c679\") " pod="openstack/dnsmasq-dns-5b6dbdb6f5-5858x" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.968691 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ad9192c4-9b58-4292-a701-c6cb4de2c679-config\") pod \"dnsmasq-dns-5b6dbdb6f5-5858x\" (UID: \"ad9192c4-9b58-4292-a701-c6cb4de2c679\") " pod="openstack/dnsmasq-dns-5b6dbdb6f5-5858x" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.969443 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ad9192c4-9b58-4292-a701-c6cb4de2c679-ovsdbserver-nb\") pod \"dnsmasq-dns-5b6dbdb6f5-5858x\" (UID: \"ad9192c4-9b58-4292-a701-c6cb4de2c679\") " pod="openstack/dnsmasq-dns-5b6dbdb6f5-5858x" Dec 10 13:13:46 crc kubenswrapper[4921]: I1210 13:13:46.986362 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mtf2s\" (UniqueName: \"kubernetes.io/projected/ad9192c4-9b58-4292-a701-c6cb4de2c679-kube-api-access-mtf2s\") pod \"dnsmasq-dns-5b6dbdb6f5-5858x\" (UID: \"ad9192c4-9b58-4292-a701-c6cb4de2c679\") " pod="openstack/dnsmasq-dns-5b6dbdb6f5-5858x" Dec 10 13:13:47 crc kubenswrapper[4921]: I1210 13:13:47.065308 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/7efd750c-824b-443b-a0e3-dc57a14e928c-db-sync-config-data\") pod \"barbican-db-sync-mlljr\" (UID: \"7efd750c-824b-443b-a0e3-dc57a14e928c\") " pod="openstack/barbican-db-sync-mlljr" Dec 10 13:13:47 crc kubenswrapper[4921]: I1210 13:13:47.065426 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7efd750c-824b-443b-a0e3-dc57a14e928c-combined-ca-bundle\") pod \"barbican-db-sync-mlljr\" (UID: \"7efd750c-824b-443b-a0e3-dc57a14e928c\") " pod="openstack/barbican-db-sync-mlljr" Dec 10 13:13:47 crc kubenswrapper[4921]: I1210 13:13:47.065476 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zxkbg\" (UniqueName: \"kubernetes.io/projected/7efd750c-824b-443b-a0e3-dc57a14e928c-kube-api-access-zxkbg\") pod \"barbican-db-sync-mlljr\" (UID: \"7efd750c-824b-443b-a0e3-dc57a14e928c\") " pod="openstack/barbican-db-sync-mlljr" Dec 10 13:13:47 crc kubenswrapper[4921]: I1210 13:13:47.070896 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7efd750c-824b-443b-a0e3-dc57a14e928c-combined-ca-bundle\") pod \"barbican-db-sync-mlljr\" (UID: \"7efd750c-824b-443b-a0e3-dc57a14e928c\") " pod="openstack/barbican-db-sync-mlljr" Dec 10 13:13:47 crc kubenswrapper[4921]: I1210 13:13:47.073988 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/7efd750c-824b-443b-a0e3-dc57a14e928c-db-sync-config-data\") pod \"barbican-db-sync-mlljr\" (UID: \"7efd750c-824b-443b-a0e3-dc57a14e928c\") " pod="openstack/barbican-db-sync-mlljr" Dec 10 13:13:47 crc kubenswrapper[4921]: I1210 13:13:47.089608 4921 util.go:30] "No 
sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5b6dbdb6f5-5858x" Dec 10 13:13:47 crc kubenswrapper[4921]: I1210 13:13:47.108588 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zxkbg\" (UniqueName: \"kubernetes.io/projected/7efd750c-824b-443b-a0e3-dc57a14e928c-kube-api-access-zxkbg\") pod \"barbican-db-sync-mlljr\" (UID: \"7efd750c-824b-443b-a0e3-dc57a14e928c\") " pod="openstack/barbican-db-sync-mlljr" Dec 10 13:13:47 crc kubenswrapper[4921]: I1210 13:13:47.195642 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-mlljr" Dec 10 13:13:47 crc kubenswrapper[4921]: I1210 13:13:47.302478 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-67795cd9-cfp8j"] Dec 10 13:13:47 crc kubenswrapper[4921]: I1210 13:13:47.453237 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-9xffq"] Dec 10 13:13:47 crc kubenswrapper[4921]: I1210 13:13:47.551922 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-nqvc7"] Dec 10 13:13:47 crc kubenswrapper[4921]: I1210 13:13:47.598107 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67795cd9-cfp8j" event={"ID":"9c4c6c6d-89dd-4427-95f7-ad81cd61004b","Type":"ContainerStarted","Data":"be6f17db9923a11a45c43c60fb10d7cbeb30e6b813ab2f18bd2098631eb32bf4"} Dec 10 13:13:47 crc kubenswrapper[4921]: I1210 13:13:47.601137 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-9xffq" event={"ID":"377b810d-b236-4836-b11e-ab2dffd082cc","Type":"ContainerStarted","Data":"acf6b0b51c84af8f30d17aab9097f289d43ea9e015f1f121bfa33e633286b340"} Dec 10 13:13:47 crc kubenswrapper[4921]: I1210 13:13:47.615259 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-554567b4f7-w2ncj" podUID="5ff8326d-54a8-4fc4-8fe9-beabdf3889ac" containerName="dnsmasq-dns" containerID="cri-o://74a8cd7b0564cd34d90dab357e4956eafe4b9a59ae7cf56d516850c65c8c6713" gracePeriod=10 Dec 10 13:13:47 crc kubenswrapper[4921]: I1210 13:13:47.615573 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-nqvc7" event={"ID":"049815fe-e8f8-45c6-9360-d2d331fa8cd3","Type":"ContainerStarted","Data":"97063519d119ecd906fd15380443b5ffb61a8f8c6a4660fea95ca53dd37eb2ea"} Dec 10 13:13:47 crc kubenswrapper[4921]: I1210 13:13:47.736844 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-fm45n"] Dec 10 13:13:47 crc kubenswrapper[4921]: I1210 13:13:47.864443 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 10 13:13:47 crc kubenswrapper[4921]: W1210 13:13:47.865785 4921 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbc458bae_c665_435a_ae77_3b7cb34146bb.slice/crio-0f049ab65e76d724f2244d86e28a9228015a82a676aaed4e6e336cc6ae61a9c3 WatchSource:0}: Error finding container 0f049ab65e76d724f2244d86e28a9228015a82a676aaed4e6e336cc6ae61a9c3: Status 404 returned error can't find the container with id 0f049ab65e76d724f2244d86e28a9228015a82a676aaed4e6e336cc6ae61a9c3 Dec 10 13:13:47 crc kubenswrapper[4921]: W1210 13:13:47.867377 4921 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf1e88d7b_1b52_4f84_9648_61b3fc78a4f5.slice/crio-a4cfbdc4c93d06347a8c14e4d76e3513eeace3120ffc9e4863f063e2fe08f6a6 WatchSource:0}: Error finding container a4cfbdc4c93d06347a8c14e4d76e3513eeace3120ffc9e4863f063e2fe08f6a6: Status 404 returned error can't find the container with id a4cfbdc4c93d06347a8c14e4d76e3513eeace3120ffc9e4863f063e2fe08f6a6 Dec 10 13:13:47 crc kubenswrapper[4921]: I1210 13:13:47.873253 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-w9cf8"] Dec 10 13:13:47 crc kubenswrapper[4921]: E1210 13:13:47.879407 4921 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5ff8326d_54a8_4fc4_8fe9_beabdf3889ac.slice/crio-conmon-74a8cd7b0564cd34d90dab357e4956eafe4b9a59ae7cf56d516850c65c8c6713.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5ff8326d_54a8_4fc4_8fe9_beabdf3889ac.slice/crio-74a8cd7b0564cd34d90dab357e4956eafe4b9a59ae7cf56d516850c65c8c6713.scope\": RecentStats: unable to find data in memory cache]" Dec 10 13:13:48 crc kubenswrapper[4921]: W1210 13:13:48.036217 4921 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7efd750c_824b_443b_a0e3_dc57a14e928c.slice/crio-37db970ba62a8576d01ccde10ff9a48023d7734a78034bd09ff6453fd7a24bc3 WatchSource:0}: Error finding container 37db970ba62a8576d01ccde10ff9a48023d7734a78034bd09ff6453fd7a24bc3: Status 404 returned error can't find the container with id 37db970ba62a8576d01ccde10ff9a48023d7734a78034bd09ff6453fd7a24bc3 Dec 10 13:13:48 crc kubenswrapper[4921]: I1210 13:13:48.037865 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-mlljr"] Dec 10 13:13:48 crc kubenswrapper[4921]: I1210 13:13:48.167257 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5b6dbdb6f5-5858x"] Dec 10 13:13:48 crc kubenswrapper[4921]: I1210 13:13:48.361368 4921 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-554567b4f7-w2ncj" Dec 10 13:13:48 crc kubenswrapper[4921]: I1210 13:13:48.511351 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rxtb5\" (UniqueName: \"kubernetes.io/projected/5ff8326d-54a8-4fc4-8fe9-beabdf3889ac-kube-api-access-rxtb5\") pod \"5ff8326d-54a8-4fc4-8fe9-beabdf3889ac\" (UID: \"5ff8326d-54a8-4fc4-8fe9-beabdf3889ac\") " Dec 10 13:13:48 crc kubenswrapper[4921]: I1210 13:13:48.511442 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5ff8326d-54a8-4fc4-8fe9-beabdf3889ac-dns-svc\") pod \"5ff8326d-54a8-4fc4-8fe9-beabdf3889ac\" (UID: \"5ff8326d-54a8-4fc4-8fe9-beabdf3889ac\") " Dec 10 13:13:48 crc kubenswrapper[4921]: I1210 13:13:48.511492 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5ff8326d-54a8-4fc4-8fe9-beabdf3889ac-ovsdbserver-nb\") pod \"5ff8326d-54a8-4fc4-8fe9-beabdf3889ac\" (UID: \"5ff8326d-54a8-4fc4-8fe9-beabdf3889ac\") " Dec 10 13:13:48 crc kubenswrapper[4921]: I1210 13:13:48.511515 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5ff8326d-54a8-4fc4-8fe9-beabdf3889ac-ovsdbserver-sb\") pod \"5ff8326d-54a8-4fc4-8fe9-beabdf3889ac\" (UID: \"5ff8326d-54a8-4fc4-8fe9-beabdf3889ac\") " Dec 10 13:13:48 crc kubenswrapper[4921]: I1210 13:13:48.511605 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5ff8326d-54a8-4fc4-8fe9-beabdf3889ac-config\") pod \"5ff8326d-54a8-4fc4-8fe9-beabdf3889ac\" (UID: \"5ff8326d-54a8-4fc4-8fe9-beabdf3889ac\") " Dec 10 13:13:48 crc kubenswrapper[4921]: I1210 13:13:48.532303 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5ff8326d-54a8-4fc4-8fe9-beabdf3889ac-kube-api-access-rxtb5" (OuterVolumeSpecName: "kube-api-access-rxtb5") pod "5ff8326d-54a8-4fc4-8fe9-beabdf3889ac" (UID: "5ff8326d-54a8-4fc4-8fe9-beabdf3889ac"). InnerVolumeSpecName "kube-api-access-rxtb5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 13:13:48 crc kubenswrapper[4921]: I1210 13:13:48.567561 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5ff8326d-54a8-4fc4-8fe9-beabdf3889ac-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "5ff8326d-54a8-4fc4-8fe9-beabdf3889ac" (UID: "5ff8326d-54a8-4fc4-8fe9-beabdf3889ac"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 13:13:48 crc kubenswrapper[4921]: I1210 13:13:48.572886 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5ff8326d-54a8-4fc4-8fe9-beabdf3889ac-config" (OuterVolumeSpecName: "config") pod "5ff8326d-54a8-4fc4-8fe9-beabdf3889ac" (UID: "5ff8326d-54a8-4fc4-8fe9-beabdf3889ac"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 13:13:48 crc kubenswrapper[4921]: I1210 13:13:48.580323 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5ff8326d-54a8-4fc4-8fe9-beabdf3889ac-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "5ff8326d-54a8-4fc4-8fe9-beabdf3889ac" (UID: "5ff8326d-54a8-4fc4-8fe9-beabdf3889ac"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 13:13:48 crc kubenswrapper[4921]: I1210 13:13:48.583233 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5ff8326d-54a8-4fc4-8fe9-beabdf3889ac-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "5ff8326d-54a8-4fc4-8fe9-beabdf3889ac" (UID: "5ff8326d-54a8-4fc4-8fe9-beabdf3889ac"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 13:13:48 crc kubenswrapper[4921]: I1210 13:13:48.613849 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rxtb5\" (UniqueName: \"kubernetes.io/projected/5ff8326d-54a8-4fc4-8fe9-beabdf3889ac-kube-api-access-rxtb5\") on node \"crc\" DevicePath \"\"" Dec 10 13:13:48 crc kubenswrapper[4921]: I1210 13:13:48.613880 4921 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5ff8326d-54a8-4fc4-8fe9-beabdf3889ac-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 10 13:13:48 crc kubenswrapper[4921]: I1210 13:13:48.613890 4921 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5ff8326d-54a8-4fc4-8fe9-beabdf3889ac-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 10 13:13:48 crc kubenswrapper[4921]: I1210 13:13:48.613898 4921 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5ff8326d-54a8-4fc4-8fe9-beabdf3889ac-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 10 13:13:48 crc kubenswrapper[4921]: I1210 13:13:48.613908 4921 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5ff8326d-54a8-4fc4-8fe9-beabdf3889ac-config\") on node \"crc\" DevicePath \"\"" Dec 10 13:13:48 crc kubenswrapper[4921]: I1210 13:13:48.630525 4921 generic.go:334] "Generic (PLEG): container finished" podID="5ff8326d-54a8-4fc4-8fe9-beabdf3889ac" containerID="74a8cd7b0564cd34d90dab357e4956eafe4b9a59ae7cf56d516850c65c8c6713" exitCode=0 Dec 10 13:13:48 crc kubenswrapper[4921]: I1210 13:13:48.630591 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-554567b4f7-w2ncj" event={"ID":"5ff8326d-54a8-4fc4-8fe9-beabdf3889ac","Type":"ContainerDied","Data":"74a8cd7b0564cd34d90dab357e4956eafe4b9a59ae7cf56d516850c65c8c6713"} Dec 10 13:13:48 crc kubenswrapper[4921]: I1210 13:13:48.630620 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-554567b4f7-w2ncj" event={"ID":"5ff8326d-54a8-4fc4-8fe9-beabdf3889ac","Type":"ContainerDied","Data":"5fa2e69b44e5a50be4c49f9e378876f10e94f0d9b3afcc773f878054eb0bc0be"} Dec 10 13:13:48 crc kubenswrapper[4921]: I1210 13:13:48.630639 4921 scope.go:117] "RemoveContainer" containerID="74a8cd7b0564cd34d90dab357e4956eafe4b9a59ae7cf56d516850c65c8c6713" Dec 10 13:13:48 crc kubenswrapper[4921]: I1210 13:13:48.630795 4921 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-554567b4f7-w2ncj" Dec 10 13:13:48 crc kubenswrapper[4921]: I1210 13:13:48.634923 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-fm45n" event={"ID":"cd9bf4e6-e6d1-4df4-ab06-5b7bc6f88473","Type":"ContainerStarted","Data":"299459e3a855512a0e764a02673a29aaf940dd0776843a1f4ca9153da8c11ad7"} Dec 10 13:13:48 crc kubenswrapper[4921]: I1210 13:13:48.640591 4921 generic.go:334] "Generic (PLEG): container finished" podID="9c4c6c6d-89dd-4427-95f7-ad81cd61004b" containerID="5cb4084445fd5beff7e29f05c8a73b5e62443c3eac9cf20230ea996a6175c5c2" exitCode=0 Dec 10 13:13:48 crc kubenswrapper[4921]: I1210 13:13:48.640646 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67795cd9-cfp8j" event={"ID":"9c4c6c6d-89dd-4427-95f7-ad81cd61004b","Type":"ContainerDied","Data":"5cb4084445fd5beff7e29f05c8a73b5e62443c3eac9cf20230ea996a6175c5c2"} Dec 10 13:13:48 crc kubenswrapper[4921]: I1210 13:13:48.649372 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-w9cf8" event={"ID":"f1e88d7b-1b52-4f84-9648-61b3fc78a4f5","Type":"ContainerStarted","Data":"81fe7a13737ffb33b1b7cc13699d92e3ae5722db7489dceacb430555d0947771"} Dec 10 13:13:48 crc kubenswrapper[4921]: I1210 13:13:48.649675 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-w9cf8" event={"ID":"f1e88d7b-1b52-4f84-9648-61b3fc78a4f5","Type":"ContainerStarted","Data":"a4cfbdc4c93d06347a8c14e4d76e3513eeace3120ffc9e4863f063e2fe08f6a6"} Dec 10 13:13:48 crc kubenswrapper[4921]: I1210 13:13:48.655083 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"bc458bae-c665-435a-ae77-3b7cb34146bb","Type":"ContainerStarted","Data":"0f049ab65e76d724f2244d86e28a9228015a82a676aaed4e6e336cc6ae61a9c3"} Dec 10 13:13:48 crc kubenswrapper[4921]: I1210 13:13:48.664896 4921 scope.go:117] "RemoveContainer" containerID="74d2c612a18d55e3efa8d2af28d7ec26ebe857bbde17031b25784b3a7599de6e" Dec 10 13:13:48 crc kubenswrapper[4921]: I1210 13:13:48.666191 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-9xffq" event={"ID":"377b810d-b236-4836-b11e-ab2dffd082cc","Type":"ContainerStarted","Data":"319ac56f724e00ff9a294ccccc30f08fa720f360b1775e1be4e911ffbaca1aa9"} Dec 10 13:13:48 crc kubenswrapper[4921]: I1210 13:13:48.675833 4921 generic.go:334] "Generic (PLEG): container finished" podID="ad9192c4-9b58-4292-a701-c6cb4de2c679" containerID="46287e25c5d16740585bb416ff7ecd1c756533f4c11a739ed26a143af11a435b" exitCode=0 Dec 10 13:13:48 crc kubenswrapper[4921]: I1210 13:13:48.675892 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b6dbdb6f5-5858x" event={"ID":"ad9192c4-9b58-4292-a701-c6cb4de2c679","Type":"ContainerDied","Data":"46287e25c5d16740585bb416ff7ecd1c756533f4c11a739ed26a143af11a435b"} Dec 10 13:13:48 crc kubenswrapper[4921]: I1210 13:13:48.675916 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b6dbdb6f5-5858x" event={"ID":"ad9192c4-9b58-4292-a701-c6cb4de2c679","Type":"ContainerStarted","Data":"37aafa4c60182d22eb874e01d4ca977576c221c87d1d86050d14ed2b4cdce157"} Dec 10 13:13:48 crc kubenswrapper[4921]: I1210 13:13:48.678400 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-w9cf8" podStartSLOduration=2.678350755 podStartE2EDuration="2.678350755s" podCreationTimestamp="2025-12-10 13:13:46 +0000 UTC" 
firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 13:13:48.66957523 +0000 UTC m=+1025.885797154" watchObservedRunningTime="2025-12-10 13:13:48.678350755 +0000 UTC m=+1025.894572679" Dec 10 13:13:48 crc kubenswrapper[4921]: I1210 13:13:48.682138 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-mlljr" event={"ID":"7efd750c-824b-443b-a0e3-dc57a14e928c","Type":"ContainerStarted","Data":"37db970ba62a8576d01ccde10ff9a48023d7734a78034bd09ff6453fd7a24bc3"} Dec 10 13:13:48 crc kubenswrapper[4921]: I1210 13:13:48.709891 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-554567b4f7-w2ncj"] Dec 10 13:13:48 crc kubenswrapper[4921]: I1210 13:13:48.720024 4921 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-554567b4f7-w2ncj"] Dec 10 13:13:48 crc kubenswrapper[4921]: I1210 13:13:48.722681 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-9xffq" podStartSLOduration=3.722663945 podStartE2EDuration="3.722663945s" podCreationTimestamp="2025-12-10 13:13:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 13:13:48.702646378 +0000 UTC m=+1025.918868322" watchObservedRunningTime="2025-12-10 13:13:48.722663945 +0000 UTC m=+1025.938885869" Dec 10 13:13:48 crc kubenswrapper[4921]: I1210 13:13:48.727366 4921 scope.go:117] "RemoveContainer" containerID="74a8cd7b0564cd34d90dab357e4956eafe4b9a59ae7cf56d516850c65c8c6713" Dec 10 13:13:48 crc kubenswrapper[4921]: E1210 13:13:48.730447 4921 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"74a8cd7b0564cd34d90dab357e4956eafe4b9a59ae7cf56d516850c65c8c6713\": container with ID starting with 74a8cd7b0564cd34d90dab357e4956eafe4b9a59ae7cf56d516850c65c8c6713 not found: ID does not exist" containerID="74a8cd7b0564cd34d90dab357e4956eafe4b9a59ae7cf56d516850c65c8c6713" Dec 10 13:13:48 crc kubenswrapper[4921]: I1210 13:13:48.730486 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"74a8cd7b0564cd34d90dab357e4956eafe4b9a59ae7cf56d516850c65c8c6713"} err="failed to get container status \"74a8cd7b0564cd34d90dab357e4956eafe4b9a59ae7cf56d516850c65c8c6713\": rpc error: code = NotFound desc = could not find container \"74a8cd7b0564cd34d90dab357e4956eafe4b9a59ae7cf56d516850c65c8c6713\": container with ID starting with 74a8cd7b0564cd34d90dab357e4956eafe4b9a59ae7cf56d516850c65c8c6713 not found: ID does not exist" Dec 10 13:13:48 crc kubenswrapper[4921]: I1210 13:13:48.730514 4921 scope.go:117] "RemoveContainer" containerID="74d2c612a18d55e3efa8d2af28d7ec26ebe857bbde17031b25784b3a7599de6e" Dec 10 13:13:48 crc kubenswrapper[4921]: E1210 13:13:48.731963 4921 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"74d2c612a18d55e3efa8d2af28d7ec26ebe857bbde17031b25784b3a7599de6e\": container with ID starting with 74d2c612a18d55e3efa8d2af28d7ec26ebe857bbde17031b25784b3a7599de6e not found: ID does not exist" containerID="74d2c612a18d55e3efa8d2af28d7ec26ebe857bbde17031b25784b3a7599de6e" Dec 10 13:13:48 crc kubenswrapper[4921]: I1210 13:13:48.731992 4921 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"74d2c612a18d55e3efa8d2af28d7ec26ebe857bbde17031b25784b3a7599de6e"} err="failed to get container status \"74d2c612a18d55e3efa8d2af28d7ec26ebe857bbde17031b25784b3a7599de6e\": rpc error: code = NotFound desc = could not find container \"74d2c612a18d55e3efa8d2af28d7ec26ebe857bbde17031b25784b3a7599de6e\": container with ID starting with 74d2c612a18d55e3efa8d2af28d7ec26ebe857bbde17031b25784b3a7599de6e not found: ID does not exist" Dec 10 13:13:50 crc kubenswrapper[4921]: I1210 13:13:49.147924 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-67795cd9-cfp8j" Dec 10 13:13:50 crc kubenswrapper[4921]: I1210 13:13:49.206279 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5ff8326d-54a8-4fc4-8fe9-beabdf3889ac" path="/var/lib/kubelet/pods/5ff8326d-54a8-4fc4-8fe9-beabdf3889ac/volumes" Dec 10 13:13:50 crc kubenswrapper[4921]: I1210 13:13:49.247321 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9c4c6c6d-89dd-4427-95f7-ad81cd61004b-config\") pod \"9c4c6c6d-89dd-4427-95f7-ad81cd61004b\" (UID: \"9c4c6c6d-89dd-4427-95f7-ad81cd61004b\") " Dec 10 13:13:50 crc kubenswrapper[4921]: I1210 13:13:49.247661 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9c4c6c6d-89dd-4427-95f7-ad81cd61004b-dns-svc\") pod \"9c4c6c6d-89dd-4427-95f7-ad81cd61004b\" (UID: \"9c4c6c6d-89dd-4427-95f7-ad81cd61004b\") " Dec 10 13:13:50 crc kubenswrapper[4921]: I1210 13:13:49.247679 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9c4c6c6d-89dd-4427-95f7-ad81cd61004b-ovsdbserver-sb\") pod \"9c4c6c6d-89dd-4427-95f7-ad81cd61004b\" (UID: \"9c4c6c6d-89dd-4427-95f7-ad81cd61004b\") " Dec 10 13:13:50 crc kubenswrapper[4921]: I1210 13:13:49.247775 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9c4c6c6d-89dd-4427-95f7-ad81cd61004b-ovsdbserver-nb\") pod \"9c4c6c6d-89dd-4427-95f7-ad81cd61004b\" (UID: \"9c4c6c6d-89dd-4427-95f7-ad81cd61004b\") " Dec 10 13:13:50 crc kubenswrapper[4921]: I1210 13:13:49.247867 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kt5rt\" (UniqueName: \"kubernetes.io/projected/9c4c6c6d-89dd-4427-95f7-ad81cd61004b-kube-api-access-kt5rt\") pod \"9c4c6c6d-89dd-4427-95f7-ad81cd61004b\" (UID: \"9c4c6c6d-89dd-4427-95f7-ad81cd61004b\") " Dec 10 13:13:50 crc kubenswrapper[4921]: I1210 13:13:49.271973 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9c4c6c6d-89dd-4427-95f7-ad81cd61004b-kube-api-access-kt5rt" (OuterVolumeSpecName: "kube-api-access-kt5rt") pod "9c4c6c6d-89dd-4427-95f7-ad81cd61004b" (UID: "9c4c6c6d-89dd-4427-95f7-ad81cd61004b"). InnerVolumeSpecName "kube-api-access-kt5rt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 13:13:50 crc kubenswrapper[4921]: I1210 13:13:49.287314 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9c4c6c6d-89dd-4427-95f7-ad81cd61004b-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "9c4c6c6d-89dd-4427-95f7-ad81cd61004b" (UID: "9c4c6c6d-89dd-4427-95f7-ad81cd61004b"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 13:13:50 crc kubenswrapper[4921]: I1210 13:13:49.295795 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9c4c6c6d-89dd-4427-95f7-ad81cd61004b-config" (OuterVolumeSpecName: "config") pod "9c4c6c6d-89dd-4427-95f7-ad81cd61004b" (UID: "9c4c6c6d-89dd-4427-95f7-ad81cd61004b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 13:13:50 crc kubenswrapper[4921]: I1210 13:13:49.333725 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9c4c6c6d-89dd-4427-95f7-ad81cd61004b-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "9c4c6c6d-89dd-4427-95f7-ad81cd61004b" (UID: "9c4c6c6d-89dd-4427-95f7-ad81cd61004b"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 13:13:50 crc kubenswrapper[4921]: I1210 13:13:49.333996 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9c4c6c6d-89dd-4427-95f7-ad81cd61004b-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "9c4c6c6d-89dd-4427-95f7-ad81cd61004b" (UID: "9c4c6c6d-89dd-4427-95f7-ad81cd61004b"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 13:13:50 crc kubenswrapper[4921]: I1210 13:13:49.350129 4921 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9c4c6c6d-89dd-4427-95f7-ad81cd61004b-config\") on node \"crc\" DevicePath \"\"" Dec 10 13:13:50 crc kubenswrapper[4921]: I1210 13:13:49.351417 4921 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9c4c6c6d-89dd-4427-95f7-ad81cd61004b-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 10 13:13:50 crc kubenswrapper[4921]: I1210 13:13:49.351657 4921 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9c4c6c6d-89dd-4427-95f7-ad81cd61004b-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 10 13:13:50 crc kubenswrapper[4921]: I1210 13:13:49.351681 4921 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9c4c6c6d-89dd-4427-95f7-ad81cd61004b-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 10 13:13:50 crc kubenswrapper[4921]: I1210 13:13:49.351695 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kt5rt\" (UniqueName: \"kubernetes.io/projected/9c4c6c6d-89dd-4427-95f7-ad81cd61004b-kube-api-access-kt5rt\") on node \"crc\" DevicePath \"\"" Dec 10 13:13:50 crc kubenswrapper[4921]: I1210 13:13:49.699132 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b6dbdb6f5-5858x" event={"ID":"ad9192c4-9b58-4292-a701-c6cb4de2c679","Type":"ContainerStarted","Data":"7bea83f6d3222728775396936253df5d0c0cd00c2a63d440f4a22b7af83a60e7"} Dec 10 13:13:50 crc kubenswrapper[4921]: I1210 13:13:49.702749 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5b6dbdb6f5-5858x" Dec 10 13:13:50 crc kubenswrapper[4921]: I1210 13:13:49.712383 4921 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-67795cd9-cfp8j" Dec 10 13:13:50 crc kubenswrapper[4921]: I1210 13:13:49.714253 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67795cd9-cfp8j" event={"ID":"9c4c6c6d-89dd-4427-95f7-ad81cd61004b","Type":"ContainerDied","Data":"be6f17db9923a11a45c43c60fb10d7cbeb30e6b813ab2f18bd2098631eb32bf4"} Dec 10 13:13:50 crc kubenswrapper[4921]: I1210 13:13:49.714307 4921 scope.go:117] "RemoveContainer" containerID="5cb4084445fd5beff7e29f05c8a73b5e62443c3eac9cf20230ea996a6175c5c2" Dec 10 13:13:50 crc kubenswrapper[4921]: I1210 13:13:49.765920 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5b6dbdb6f5-5858x" podStartSLOduration=3.765900262 podStartE2EDuration="3.765900262s" podCreationTimestamp="2025-12-10 13:13:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 13:13:49.728824528 +0000 UTC m=+1026.945046462" watchObservedRunningTime="2025-12-10 13:13:49.765900262 +0000 UTC m=+1026.982122186" Dec 10 13:13:50 crc kubenswrapper[4921]: I1210 13:13:49.793007 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-67795cd9-cfp8j"] Dec 10 13:13:50 crc kubenswrapper[4921]: I1210 13:13:49.804228 4921 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-67795cd9-cfp8j"] Dec 10 13:13:50 crc kubenswrapper[4921]: I1210 13:13:50.154314 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 10 13:13:51 crc kubenswrapper[4921]: I1210 13:13:51.204060 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9c4c6c6d-89dd-4427-95f7-ad81cd61004b" path="/var/lib/kubelet/pods/9c4c6c6d-89dd-4427-95f7-ad81cd61004b/volumes" Dec 10 13:13:53 crc kubenswrapper[4921]: I1210 13:13:53.754118 4921 generic.go:334] "Generic (PLEG): container finished" podID="377b810d-b236-4836-b11e-ab2dffd082cc" containerID="319ac56f724e00ff9a294ccccc30f08fa720f360b1775e1be4e911ffbaca1aa9" exitCode=0 Dec 10 13:13:53 crc kubenswrapper[4921]: I1210 13:13:53.754170 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-9xffq" event={"ID":"377b810d-b236-4836-b11e-ab2dffd082cc","Type":"ContainerDied","Data":"319ac56f724e00ff9a294ccccc30f08fa720f360b1775e1be4e911ffbaca1aa9"} Dec 10 13:13:57 crc kubenswrapper[4921]: I1210 13:13:57.091528 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5b6dbdb6f5-5858x" Dec 10 13:13:57 crc kubenswrapper[4921]: I1210 13:13:57.158351 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8554648995-ns77l"] Dec 10 13:13:57 crc kubenswrapper[4921]: I1210 13:13:57.158640 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-8554648995-ns77l" podUID="f88eabcd-9eab-4760-b3b7-3d07479f164c" containerName="dnsmasq-dns" containerID="cri-o://1fc9773bac597bbf2a49c255dcb232febc0db6995f6601c2a733b9c4fddc9557" gracePeriod=10 Dec 10 13:13:57 crc kubenswrapper[4921]: I1210 13:13:57.580037 4921 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-9xffq" Dec 10 13:13:57 crc kubenswrapper[4921]: I1210 13:13:57.709770 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/377b810d-b236-4836-b11e-ab2dffd082cc-config-data\") pod \"377b810d-b236-4836-b11e-ab2dffd082cc\" (UID: \"377b810d-b236-4836-b11e-ab2dffd082cc\") " Dec 10 13:13:57 crc kubenswrapper[4921]: I1210 13:13:57.709818 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/377b810d-b236-4836-b11e-ab2dffd082cc-credential-keys\") pod \"377b810d-b236-4836-b11e-ab2dffd082cc\" (UID: \"377b810d-b236-4836-b11e-ab2dffd082cc\") " Dec 10 13:13:57 crc kubenswrapper[4921]: I1210 13:13:57.709846 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/377b810d-b236-4836-b11e-ab2dffd082cc-combined-ca-bundle\") pod \"377b810d-b236-4836-b11e-ab2dffd082cc\" (UID: \"377b810d-b236-4836-b11e-ab2dffd082cc\") " Dec 10 13:13:57 crc kubenswrapper[4921]: I1210 13:13:57.709894 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/377b810d-b236-4836-b11e-ab2dffd082cc-scripts\") pod \"377b810d-b236-4836-b11e-ab2dffd082cc\" (UID: \"377b810d-b236-4836-b11e-ab2dffd082cc\") " Dec 10 13:13:57 crc kubenswrapper[4921]: I1210 13:13:57.709928 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8xrfn\" (UniqueName: \"kubernetes.io/projected/377b810d-b236-4836-b11e-ab2dffd082cc-kube-api-access-8xrfn\") pod \"377b810d-b236-4836-b11e-ab2dffd082cc\" (UID: \"377b810d-b236-4836-b11e-ab2dffd082cc\") " Dec 10 13:13:57 crc kubenswrapper[4921]: I1210 13:13:57.709999 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/377b810d-b236-4836-b11e-ab2dffd082cc-fernet-keys\") pod \"377b810d-b236-4836-b11e-ab2dffd082cc\" (UID: \"377b810d-b236-4836-b11e-ab2dffd082cc\") " Dec 10 13:13:57 crc kubenswrapper[4921]: I1210 13:13:57.717852 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/377b810d-b236-4836-b11e-ab2dffd082cc-kube-api-access-8xrfn" (OuterVolumeSpecName: "kube-api-access-8xrfn") pod "377b810d-b236-4836-b11e-ab2dffd082cc" (UID: "377b810d-b236-4836-b11e-ab2dffd082cc"). InnerVolumeSpecName "kube-api-access-8xrfn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 13:13:57 crc kubenswrapper[4921]: I1210 13:13:57.724955 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/377b810d-b236-4836-b11e-ab2dffd082cc-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "377b810d-b236-4836-b11e-ab2dffd082cc" (UID: "377b810d-b236-4836-b11e-ab2dffd082cc"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:13:57 crc kubenswrapper[4921]: I1210 13:13:57.728740 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/377b810d-b236-4836-b11e-ab2dffd082cc-scripts" (OuterVolumeSpecName: "scripts") pod "377b810d-b236-4836-b11e-ab2dffd082cc" (UID: "377b810d-b236-4836-b11e-ab2dffd082cc"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:13:57 crc kubenswrapper[4921]: I1210 13:13:57.730271 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/377b810d-b236-4836-b11e-ab2dffd082cc-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "377b810d-b236-4836-b11e-ab2dffd082cc" (UID: "377b810d-b236-4836-b11e-ab2dffd082cc"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:13:57 crc kubenswrapper[4921]: I1210 13:13:57.744125 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/377b810d-b236-4836-b11e-ab2dffd082cc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "377b810d-b236-4836-b11e-ab2dffd082cc" (UID: "377b810d-b236-4836-b11e-ab2dffd082cc"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:13:57 crc kubenswrapper[4921]: I1210 13:13:57.770651 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/377b810d-b236-4836-b11e-ab2dffd082cc-config-data" (OuterVolumeSpecName: "config-data") pod "377b810d-b236-4836-b11e-ab2dffd082cc" (UID: "377b810d-b236-4836-b11e-ab2dffd082cc"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:13:57 crc kubenswrapper[4921]: I1210 13:13:57.796634 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-9xffq" event={"ID":"377b810d-b236-4836-b11e-ab2dffd082cc","Type":"ContainerDied","Data":"acf6b0b51c84af8f30d17aab9097f289d43ea9e015f1f121bfa33e633286b340"} Dec 10 13:13:57 crc kubenswrapper[4921]: I1210 13:13:57.796673 4921 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="acf6b0b51c84af8f30d17aab9097f289d43ea9e015f1f121bfa33e633286b340" Dec 10 13:13:57 crc kubenswrapper[4921]: I1210 13:13:57.796733 4921 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-9xffq" Dec 10 13:13:57 crc kubenswrapper[4921]: I1210 13:13:57.810477 4921 generic.go:334] "Generic (PLEG): container finished" podID="f88eabcd-9eab-4760-b3b7-3d07479f164c" containerID="1fc9773bac597bbf2a49c255dcb232febc0db6995f6601c2a733b9c4fddc9557" exitCode=0 Dec 10 13:13:57 crc kubenswrapper[4921]: I1210 13:13:57.810529 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-ns77l" event={"ID":"f88eabcd-9eab-4760-b3b7-3d07479f164c","Type":"ContainerDied","Data":"1fc9773bac597bbf2a49c255dcb232febc0db6995f6601c2a733b9c4fddc9557"} Dec 10 13:13:57 crc kubenswrapper[4921]: I1210 13:13:57.811657 4921 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/377b810d-b236-4836-b11e-ab2dffd082cc-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 13:13:57 crc kubenswrapper[4921]: I1210 13:13:57.811684 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8xrfn\" (UniqueName: \"kubernetes.io/projected/377b810d-b236-4836-b11e-ab2dffd082cc-kube-api-access-8xrfn\") on node \"crc\" DevicePath \"\"" Dec 10 13:13:57 crc kubenswrapper[4921]: I1210 13:13:57.811695 4921 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/377b810d-b236-4836-b11e-ab2dffd082cc-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 10 13:13:57 crc kubenswrapper[4921]: I1210 13:13:57.811705 4921 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/377b810d-b236-4836-b11e-ab2dffd082cc-config-data\") on node \"crc\" DevicePath \"\"" Dec 10 13:13:57 crc kubenswrapper[4921]: I1210 13:13:57.811713 4921 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/377b810d-b236-4836-b11e-ab2dffd082cc-credential-keys\") on node \"crc\" DevicePath \"\"" Dec 10 13:13:57 crc kubenswrapper[4921]: I1210 13:13:57.811721 4921 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/377b810d-b236-4836-b11e-ab2dffd082cc-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 13:13:58 crc kubenswrapper[4921]: I1210 13:13:58.545755 4921 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-8554648995-ns77l" podUID="f88eabcd-9eab-4760-b3b7-3d07479f164c" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.110:5353: connect: connection refused" Dec 10 13:13:58 crc kubenswrapper[4921]: I1210 13:13:58.691802 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-9xffq"] Dec 10 13:13:58 crc kubenswrapper[4921]: I1210 13:13:58.693748 4921 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-9xffq"] Dec 10 13:13:58 crc kubenswrapper[4921]: I1210 13:13:58.780763 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-2ztsg"] Dec 10 13:13:58 crc kubenswrapper[4921]: E1210 13:13:58.782021 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ff8326d-54a8-4fc4-8fe9-beabdf3889ac" containerName="dnsmasq-dns" Dec 10 13:13:58 crc kubenswrapper[4921]: I1210 13:13:58.782087 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ff8326d-54a8-4fc4-8fe9-beabdf3889ac" containerName="dnsmasq-dns" Dec 10 13:13:58 crc kubenswrapper[4921]: E1210 13:13:58.782114 4921 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="377b810d-b236-4836-b11e-ab2dffd082cc" containerName="keystone-bootstrap" Dec 10 13:13:58 crc kubenswrapper[4921]: I1210 13:13:58.782138 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="377b810d-b236-4836-b11e-ab2dffd082cc" containerName="keystone-bootstrap" Dec 10 13:13:58 crc kubenswrapper[4921]: E1210 13:13:58.782163 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ff8326d-54a8-4fc4-8fe9-beabdf3889ac" containerName="init" Dec 10 13:13:58 crc kubenswrapper[4921]: I1210 13:13:58.782174 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ff8326d-54a8-4fc4-8fe9-beabdf3889ac" containerName="init" Dec 10 13:13:58 crc kubenswrapper[4921]: E1210 13:13:58.782201 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c4c6c6d-89dd-4427-95f7-ad81cd61004b" containerName="init" Dec 10 13:13:58 crc kubenswrapper[4921]: I1210 13:13:58.782209 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c4c6c6d-89dd-4427-95f7-ad81cd61004b" containerName="init" Dec 10 13:13:58 crc kubenswrapper[4921]: I1210 13:13:58.786216 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="9c4c6c6d-89dd-4427-95f7-ad81cd61004b" containerName="init" Dec 10 13:13:58 crc kubenswrapper[4921]: I1210 13:13:58.786300 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="5ff8326d-54a8-4fc4-8fe9-beabdf3889ac" containerName="dnsmasq-dns" Dec 10 13:13:58 crc kubenswrapper[4921]: I1210 13:13:58.786328 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="377b810d-b236-4836-b11e-ab2dffd082cc" containerName="keystone-bootstrap" Dec 10 13:13:58 crc kubenswrapper[4921]: I1210 13:13:58.788015 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-2ztsg" Dec 10 13:13:58 crc kubenswrapper[4921]: I1210 13:13:58.793100 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Dec 10 13:13:58 crc kubenswrapper[4921]: I1210 13:13:58.793180 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-t9xv5" Dec 10 13:13:58 crc kubenswrapper[4921]: I1210 13:13:58.793120 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 10 13:13:58 crc kubenswrapper[4921]: I1210 13:13:58.795592 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 10 13:13:58 crc kubenswrapper[4921]: I1210 13:13:58.795801 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 10 13:13:58 crc kubenswrapper[4921]: I1210 13:13:58.805230 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-2ztsg"] Dec 10 13:13:58 crc kubenswrapper[4921]: I1210 13:13:58.828805 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/99b2821d-2229-4c9e-8b01-699b20d6d65e-scripts\") pod \"keystone-bootstrap-2ztsg\" (UID: \"99b2821d-2229-4c9e-8b01-699b20d6d65e\") " pod="openstack/keystone-bootstrap-2ztsg" Dec 10 13:13:58 crc kubenswrapper[4921]: I1210 13:13:58.835786 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/99b2821d-2229-4c9e-8b01-699b20d6d65e-combined-ca-bundle\") pod \"keystone-bootstrap-2ztsg\" (UID: \"99b2821d-2229-4c9e-8b01-699b20d6d65e\") " 
pod="openstack/keystone-bootstrap-2ztsg" Dec 10 13:13:58 crc kubenswrapper[4921]: I1210 13:13:58.835816 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6w4bl\" (UniqueName: \"kubernetes.io/projected/99b2821d-2229-4c9e-8b01-699b20d6d65e-kube-api-access-6w4bl\") pod \"keystone-bootstrap-2ztsg\" (UID: \"99b2821d-2229-4c9e-8b01-699b20d6d65e\") " pod="openstack/keystone-bootstrap-2ztsg" Dec 10 13:13:58 crc kubenswrapper[4921]: I1210 13:13:58.835851 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/99b2821d-2229-4c9e-8b01-699b20d6d65e-fernet-keys\") pod \"keystone-bootstrap-2ztsg\" (UID: \"99b2821d-2229-4c9e-8b01-699b20d6d65e\") " pod="openstack/keystone-bootstrap-2ztsg" Dec 10 13:13:58 crc kubenswrapper[4921]: I1210 13:13:58.835891 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/99b2821d-2229-4c9e-8b01-699b20d6d65e-config-data\") pod \"keystone-bootstrap-2ztsg\" (UID: \"99b2821d-2229-4c9e-8b01-699b20d6d65e\") " pod="openstack/keystone-bootstrap-2ztsg" Dec 10 13:13:58 crc kubenswrapper[4921]: I1210 13:13:58.836023 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/99b2821d-2229-4c9e-8b01-699b20d6d65e-credential-keys\") pod \"keystone-bootstrap-2ztsg\" (UID: \"99b2821d-2229-4c9e-8b01-699b20d6d65e\") " pod="openstack/keystone-bootstrap-2ztsg" Dec 10 13:13:58 crc kubenswrapper[4921]: I1210 13:13:58.937937 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/99b2821d-2229-4c9e-8b01-699b20d6d65e-fernet-keys\") pod \"keystone-bootstrap-2ztsg\" (UID: \"99b2821d-2229-4c9e-8b01-699b20d6d65e\") " pod="openstack/keystone-bootstrap-2ztsg" Dec 10 13:13:58 crc kubenswrapper[4921]: I1210 13:13:58.939773 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/99b2821d-2229-4c9e-8b01-699b20d6d65e-config-data\") pod \"keystone-bootstrap-2ztsg\" (UID: \"99b2821d-2229-4c9e-8b01-699b20d6d65e\") " pod="openstack/keystone-bootstrap-2ztsg" Dec 10 13:13:58 crc kubenswrapper[4921]: I1210 13:13:58.939898 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/99b2821d-2229-4c9e-8b01-699b20d6d65e-credential-keys\") pod \"keystone-bootstrap-2ztsg\" (UID: \"99b2821d-2229-4c9e-8b01-699b20d6d65e\") " pod="openstack/keystone-bootstrap-2ztsg" Dec 10 13:13:58 crc kubenswrapper[4921]: I1210 13:13:58.940264 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/99b2821d-2229-4c9e-8b01-699b20d6d65e-scripts\") pod \"keystone-bootstrap-2ztsg\" (UID: \"99b2821d-2229-4c9e-8b01-699b20d6d65e\") " pod="openstack/keystone-bootstrap-2ztsg" Dec 10 13:13:58 crc kubenswrapper[4921]: I1210 13:13:58.940362 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/99b2821d-2229-4c9e-8b01-699b20d6d65e-combined-ca-bundle\") pod \"keystone-bootstrap-2ztsg\" (UID: \"99b2821d-2229-4c9e-8b01-699b20d6d65e\") " pod="openstack/keystone-bootstrap-2ztsg" Dec 10 13:13:58 crc kubenswrapper[4921]: 
I1210 13:13:58.940466 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6w4bl\" (UniqueName: \"kubernetes.io/projected/99b2821d-2229-4c9e-8b01-699b20d6d65e-kube-api-access-6w4bl\") pod \"keystone-bootstrap-2ztsg\" (UID: \"99b2821d-2229-4c9e-8b01-699b20d6d65e\") " pod="openstack/keystone-bootstrap-2ztsg" Dec 10 13:13:58 crc kubenswrapper[4921]: I1210 13:13:58.944324 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/99b2821d-2229-4c9e-8b01-699b20d6d65e-scripts\") pod \"keystone-bootstrap-2ztsg\" (UID: \"99b2821d-2229-4c9e-8b01-699b20d6d65e\") " pod="openstack/keystone-bootstrap-2ztsg" Dec 10 13:13:58 crc kubenswrapper[4921]: I1210 13:13:58.944855 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/99b2821d-2229-4c9e-8b01-699b20d6d65e-config-data\") pod \"keystone-bootstrap-2ztsg\" (UID: \"99b2821d-2229-4c9e-8b01-699b20d6d65e\") " pod="openstack/keystone-bootstrap-2ztsg" Dec 10 13:13:58 crc kubenswrapper[4921]: I1210 13:13:58.944945 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/99b2821d-2229-4c9e-8b01-699b20d6d65e-fernet-keys\") pod \"keystone-bootstrap-2ztsg\" (UID: \"99b2821d-2229-4c9e-8b01-699b20d6d65e\") " pod="openstack/keystone-bootstrap-2ztsg" Dec 10 13:13:58 crc kubenswrapper[4921]: I1210 13:13:58.946556 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/99b2821d-2229-4c9e-8b01-699b20d6d65e-combined-ca-bundle\") pod \"keystone-bootstrap-2ztsg\" (UID: \"99b2821d-2229-4c9e-8b01-699b20d6d65e\") " pod="openstack/keystone-bootstrap-2ztsg" Dec 10 13:13:58 crc kubenswrapper[4921]: I1210 13:13:58.951905 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/99b2821d-2229-4c9e-8b01-699b20d6d65e-credential-keys\") pod \"keystone-bootstrap-2ztsg\" (UID: \"99b2821d-2229-4c9e-8b01-699b20d6d65e\") " pod="openstack/keystone-bootstrap-2ztsg" Dec 10 13:13:58 crc kubenswrapper[4921]: I1210 13:13:58.957543 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6w4bl\" (UniqueName: \"kubernetes.io/projected/99b2821d-2229-4c9e-8b01-699b20d6d65e-kube-api-access-6w4bl\") pod \"keystone-bootstrap-2ztsg\" (UID: \"99b2821d-2229-4c9e-8b01-699b20d6d65e\") " pod="openstack/keystone-bootstrap-2ztsg" Dec 10 13:13:58 crc kubenswrapper[4921]: I1210 13:13:58.970432 4921 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-8554648995-ns77l" Dec 10 13:13:59 crc kubenswrapper[4921]: I1210 13:13:59.042119 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pshck\" (UniqueName: \"kubernetes.io/projected/f88eabcd-9eab-4760-b3b7-3d07479f164c-kube-api-access-pshck\") pod \"f88eabcd-9eab-4760-b3b7-3d07479f164c\" (UID: \"f88eabcd-9eab-4760-b3b7-3d07479f164c\") " Dec 10 13:13:59 crc kubenswrapper[4921]: I1210 13:13:59.042161 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f88eabcd-9eab-4760-b3b7-3d07479f164c-config\") pod \"f88eabcd-9eab-4760-b3b7-3d07479f164c\" (UID: \"f88eabcd-9eab-4760-b3b7-3d07479f164c\") " Dec 10 13:13:59 crc kubenswrapper[4921]: I1210 13:13:59.042288 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f88eabcd-9eab-4760-b3b7-3d07479f164c-dns-svc\") pod \"f88eabcd-9eab-4760-b3b7-3d07479f164c\" (UID: \"f88eabcd-9eab-4760-b3b7-3d07479f164c\") " Dec 10 13:13:59 crc kubenswrapper[4921]: I1210 13:13:59.042430 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f88eabcd-9eab-4760-b3b7-3d07479f164c-ovsdbserver-sb\") pod \"f88eabcd-9eab-4760-b3b7-3d07479f164c\" (UID: \"f88eabcd-9eab-4760-b3b7-3d07479f164c\") " Dec 10 13:13:59 crc kubenswrapper[4921]: I1210 13:13:59.042454 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f88eabcd-9eab-4760-b3b7-3d07479f164c-ovsdbserver-nb\") pod \"f88eabcd-9eab-4760-b3b7-3d07479f164c\" (UID: \"f88eabcd-9eab-4760-b3b7-3d07479f164c\") " Dec 10 13:13:59 crc kubenswrapper[4921]: I1210 13:13:59.052572 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88eabcd-9eab-4760-b3b7-3d07479f164c-kube-api-access-pshck" (OuterVolumeSpecName: "kube-api-access-pshck") pod "f88eabcd-9eab-4760-b3b7-3d07479f164c" (UID: "f88eabcd-9eab-4760-b3b7-3d07479f164c"). InnerVolumeSpecName "kube-api-access-pshck". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 13:13:59 crc kubenswrapper[4921]: I1210 13:13:59.096840 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f88eabcd-9eab-4760-b3b7-3d07479f164c-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "f88eabcd-9eab-4760-b3b7-3d07479f164c" (UID: "f88eabcd-9eab-4760-b3b7-3d07479f164c"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 13:13:59 crc kubenswrapper[4921]: I1210 13:13:59.100926 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f88eabcd-9eab-4760-b3b7-3d07479f164c-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "f88eabcd-9eab-4760-b3b7-3d07479f164c" (UID: "f88eabcd-9eab-4760-b3b7-3d07479f164c"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 13:13:59 crc kubenswrapper[4921]: I1210 13:13:59.104111 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f88eabcd-9eab-4760-b3b7-3d07479f164c-config" (OuterVolumeSpecName: "config") pod "f88eabcd-9eab-4760-b3b7-3d07479f164c" (UID: "f88eabcd-9eab-4760-b3b7-3d07479f164c"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 13:13:59 crc kubenswrapper[4921]: I1210 13:13:59.126750 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f88eabcd-9eab-4760-b3b7-3d07479f164c-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "f88eabcd-9eab-4760-b3b7-3d07479f164c" (UID: "f88eabcd-9eab-4760-b3b7-3d07479f164c"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 13:13:59 crc kubenswrapper[4921]: I1210 13:13:59.136898 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-2ztsg" Dec 10 13:13:59 crc kubenswrapper[4921]: I1210 13:13:59.184810 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pshck\" (UniqueName: \"kubernetes.io/projected/f88eabcd-9eab-4760-b3b7-3d07479f164c-kube-api-access-pshck\") on node \"crc\" DevicePath \"\"" Dec 10 13:13:59 crc kubenswrapper[4921]: I1210 13:13:59.185134 4921 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f88eabcd-9eab-4760-b3b7-3d07479f164c-config\") on node \"crc\" DevicePath \"\"" Dec 10 13:13:59 crc kubenswrapper[4921]: I1210 13:13:59.185144 4921 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f88eabcd-9eab-4760-b3b7-3d07479f164c-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 10 13:13:59 crc kubenswrapper[4921]: I1210 13:13:59.185154 4921 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f88eabcd-9eab-4760-b3b7-3d07479f164c-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 10 13:13:59 crc kubenswrapper[4921]: I1210 13:13:59.185162 4921 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f88eabcd-9eab-4760-b3b7-3d07479f164c-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 10 13:13:59 crc kubenswrapper[4921]: I1210 13:13:59.204417 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="377b810d-b236-4836-b11e-ab2dffd082cc" path="/var/lib/kubelet/pods/377b810d-b236-4836-b11e-ab2dffd082cc/volumes" Dec 10 13:13:59 crc kubenswrapper[4921]: I1210 13:13:59.586082 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-2ztsg"] Dec 10 13:13:59 crc kubenswrapper[4921]: I1210 13:13:59.846636 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-fm45n" event={"ID":"cd9bf4e6-e6d1-4df4-ab06-5b7bc6f88473","Type":"ContainerStarted","Data":"cab9ecb846d4d5646db806729a6cee52da95c7302f0fa93b4a45ccde471ba547"} Dec 10 13:13:59 crc kubenswrapper[4921]: I1210 13:13:59.850007 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-2ztsg" event={"ID":"99b2821d-2229-4c9e-8b01-699b20d6d65e","Type":"ContainerStarted","Data":"914102e9bbdf78748893ba40fbf1646a5cd324532d8bfac706c999cf0f611174"} Dec 10 13:13:59 crc kubenswrapper[4921]: I1210 13:13:59.850034 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-2ztsg" event={"ID":"99b2821d-2229-4c9e-8b01-699b20d6d65e","Type":"ContainerStarted","Data":"0be63ddb4542a8a8141ecc554d28848e39b1f03cedb7b6055e3bca505b80b147"} Dec 10 13:13:59 crc kubenswrapper[4921]: I1210 13:13:59.851936 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"bc458bae-c665-435a-ae77-3b7cb34146bb","Type":"ContainerStarted","Data":"125289fd7175b7978e03316cc021d74bc2b3a57c6164c00d83e6a66b8fddefd3"} Dec 10 13:13:59 crc kubenswrapper[4921]: I1210 13:13:59.853641 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-mlljr" event={"ID":"7efd750c-824b-443b-a0e3-dc57a14e928c","Type":"ContainerStarted","Data":"89b36f5012a2e6af1c4107db3f04f3989e759a873cfe37fdd7729e56d6414781"} Dec 10 13:13:59 crc kubenswrapper[4921]: I1210 13:13:59.862003 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-fm45n" podStartSLOduration=2.9701154560000003 podStartE2EDuration="13.861986115s" podCreationTimestamp="2025-12-10 13:13:46 +0000 UTC" firstStartedPulling="2025-12-10 13:13:47.743170358 +0000 UTC m=+1024.959392282" lastFinishedPulling="2025-12-10 13:13:58.635041017 +0000 UTC m=+1035.851262941" observedRunningTime="2025-12-10 13:13:59.861876912 +0000 UTC m=+1037.078098846" watchObservedRunningTime="2025-12-10 13:13:59.861986115 +0000 UTC m=+1037.078208039" Dec 10 13:13:59 crc kubenswrapper[4921]: I1210 13:13:59.871284 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-ns77l" event={"ID":"f88eabcd-9eab-4760-b3b7-3d07479f164c","Type":"ContainerDied","Data":"43eef6fb50fd44d7f742addd7093f6b257e97d3fa7ff0098afe7c1a158a39f97"} Dec 10 13:13:59 crc kubenswrapper[4921]: I1210 13:13:59.871328 4921 scope.go:117] "RemoveContainer" containerID="1fc9773bac597bbf2a49c255dcb232febc0db6995f6601c2a733b9c4fddc9557" Dec 10 13:13:59 crc kubenswrapper[4921]: I1210 13:13:59.871683 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8554648995-ns77l" Dec 10 13:13:59 crc kubenswrapper[4921]: I1210 13:13:59.881911 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-mlljr" podStartSLOduration=3.2800099129999998 podStartE2EDuration="13.881891549s" podCreationTimestamp="2025-12-10 13:13:46 +0000 UTC" firstStartedPulling="2025-12-10 13:13:48.039663435 +0000 UTC m=+1025.255885359" lastFinishedPulling="2025-12-10 13:13:58.641545071 +0000 UTC m=+1035.857766995" observedRunningTime="2025-12-10 13:13:59.877807239 +0000 UTC m=+1037.094029163" watchObservedRunningTime="2025-12-10 13:13:59.881891549 +0000 UTC m=+1037.098113473" Dec 10 13:13:59 crc kubenswrapper[4921]: I1210 13:13:59.908267 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-2ztsg" podStartSLOduration=1.9082482060000001 podStartE2EDuration="1.908248206s" podCreationTimestamp="2025-12-10 13:13:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 13:13:59.899949784 +0000 UTC m=+1037.116171708" watchObservedRunningTime="2025-12-10 13:13:59.908248206 +0000 UTC m=+1037.124470130" Dec 10 13:13:59 crc kubenswrapper[4921]: I1210 13:13:59.928953 4921 scope.go:117] "RemoveContainer" containerID="1c26313d1317751dda9afc9448652f5180b6d63a8b3fc52d42240ca787879766" Dec 10 13:13:59 crc kubenswrapper[4921]: I1210 13:13:59.976263 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8554648995-ns77l"] Dec 10 13:13:59 crc kubenswrapper[4921]: I1210 13:13:59.988611 4921 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-8554648995-ns77l"] Dec 10 13:14:00 crc kubenswrapper[4921]: I1210 13:14:00.887995 4921 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"bc458bae-c665-435a-ae77-3b7cb34146bb","Type":"ContainerStarted","Data":"ecd72cec7f362008bab6633085781d8ee98e47726d4bfbc7fd117b648233a9b0"} Dec 10 13:14:01 crc kubenswrapper[4921]: I1210 13:14:01.203222 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88eabcd-9eab-4760-b3b7-3d07479f164c" path="/var/lib/kubelet/pods/f88eabcd-9eab-4760-b3b7-3d07479f164c/volumes" Dec 10 13:14:05 crc kubenswrapper[4921]: I1210 13:14:05.958719 4921 generic.go:334] "Generic (PLEG): container finished" podID="cd9bf4e6-e6d1-4df4-ab06-5b7bc6f88473" containerID="cab9ecb846d4d5646db806729a6cee52da95c7302f0fa93b4a45ccde471ba547" exitCode=0 Dec 10 13:14:05 crc kubenswrapper[4921]: I1210 13:14:05.958822 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-fm45n" event={"ID":"cd9bf4e6-e6d1-4df4-ab06-5b7bc6f88473","Type":"ContainerDied","Data":"cab9ecb846d4d5646db806729a6cee52da95c7302f0fa93b4a45ccde471ba547"} Dec 10 13:14:05 crc kubenswrapper[4921]: I1210 13:14:05.960616 4921 generic.go:334] "Generic (PLEG): container finished" podID="99b2821d-2229-4c9e-8b01-699b20d6d65e" containerID="914102e9bbdf78748893ba40fbf1646a5cd324532d8bfac706c999cf0f611174" exitCode=0 Dec 10 13:14:05 crc kubenswrapper[4921]: I1210 13:14:05.960641 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-2ztsg" event={"ID":"99b2821d-2229-4c9e-8b01-699b20d6d65e","Type":"ContainerDied","Data":"914102e9bbdf78748893ba40fbf1646a5cd324532d8bfac706c999cf0f611174"} Dec 10 13:14:06 crc kubenswrapper[4921]: I1210 13:14:06.970536 4921 generic.go:334] "Generic (PLEG): container finished" podID="7efd750c-824b-443b-a0e3-dc57a14e928c" containerID="89b36f5012a2e6af1c4107db3f04f3989e759a873cfe37fdd7729e56d6414781" exitCode=0 Dec 10 13:14:06 crc kubenswrapper[4921]: I1210 13:14:06.970602 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-mlljr" event={"ID":"7efd750c-824b-443b-a0e3-dc57a14e928c","Type":"ContainerDied","Data":"89b36f5012a2e6af1c4107db3f04f3989e759a873cfe37fdd7729e56d6414781"} Dec 10 13:14:07 crc kubenswrapper[4921]: I1210 13:14:07.567450 4921 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-fm45n" Dec 10 13:14:07 crc kubenswrapper[4921]: I1210 13:14:07.680230 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cd9bf4e6-e6d1-4df4-ab06-5b7bc6f88473-scripts\") pod \"cd9bf4e6-e6d1-4df4-ab06-5b7bc6f88473\" (UID: \"cd9bf4e6-e6d1-4df4-ab06-5b7bc6f88473\") " Dec 10 13:14:07 crc kubenswrapper[4921]: I1210 13:14:07.680304 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cd9bf4e6-e6d1-4df4-ab06-5b7bc6f88473-config-data\") pod \"cd9bf4e6-e6d1-4df4-ab06-5b7bc6f88473\" (UID: \"cd9bf4e6-e6d1-4df4-ab06-5b7bc6f88473\") " Dec 10 13:14:07 crc kubenswrapper[4921]: I1210 13:14:07.680369 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cd9bf4e6-e6d1-4df4-ab06-5b7bc6f88473-logs\") pod \"cd9bf4e6-e6d1-4df4-ab06-5b7bc6f88473\" (UID: \"cd9bf4e6-e6d1-4df4-ab06-5b7bc6f88473\") " Dec 10 13:14:07 crc kubenswrapper[4921]: I1210 13:14:07.681119 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cd9bf4e6-e6d1-4df4-ab06-5b7bc6f88473-logs" (OuterVolumeSpecName: "logs") pod "cd9bf4e6-e6d1-4df4-ab06-5b7bc6f88473" (UID: "cd9bf4e6-e6d1-4df4-ab06-5b7bc6f88473"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 13:14:07 crc kubenswrapper[4921]: I1210 13:14:07.680445 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qvntt\" (UniqueName: \"kubernetes.io/projected/cd9bf4e6-e6d1-4df4-ab06-5b7bc6f88473-kube-api-access-qvntt\") pod \"cd9bf4e6-e6d1-4df4-ab06-5b7bc6f88473\" (UID: \"cd9bf4e6-e6d1-4df4-ab06-5b7bc6f88473\") " Dec 10 13:14:07 crc kubenswrapper[4921]: I1210 13:14:07.681278 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd9bf4e6-e6d1-4df4-ab06-5b7bc6f88473-combined-ca-bundle\") pod \"cd9bf4e6-e6d1-4df4-ab06-5b7bc6f88473\" (UID: \"cd9bf4e6-e6d1-4df4-ab06-5b7bc6f88473\") " Dec 10 13:14:07 crc kubenswrapper[4921]: I1210 13:14:07.682192 4921 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cd9bf4e6-e6d1-4df4-ab06-5b7bc6f88473-logs\") on node \"crc\" DevicePath \"\"" Dec 10 13:14:07 crc kubenswrapper[4921]: I1210 13:14:07.696102 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd9bf4e6-e6d1-4df4-ab06-5b7bc6f88473-kube-api-access-qvntt" (OuterVolumeSpecName: "kube-api-access-qvntt") pod "cd9bf4e6-e6d1-4df4-ab06-5b7bc6f88473" (UID: "cd9bf4e6-e6d1-4df4-ab06-5b7bc6f88473"). InnerVolumeSpecName "kube-api-access-qvntt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 13:14:07 crc kubenswrapper[4921]: I1210 13:14:07.696293 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cd9bf4e6-e6d1-4df4-ab06-5b7bc6f88473-scripts" (OuterVolumeSpecName: "scripts") pod "cd9bf4e6-e6d1-4df4-ab06-5b7bc6f88473" (UID: "cd9bf4e6-e6d1-4df4-ab06-5b7bc6f88473"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:14:07 crc kubenswrapper[4921]: I1210 13:14:07.709788 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cd9bf4e6-e6d1-4df4-ab06-5b7bc6f88473-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cd9bf4e6-e6d1-4df4-ab06-5b7bc6f88473" (UID: "cd9bf4e6-e6d1-4df4-ab06-5b7bc6f88473"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:14:07 crc kubenswrapper[4921]: I1210 13:14:07.721203 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cd9bf4e6-e6d1-4df4-ab06-5b7bc6f88473-config-data" (OuterVolumeSpecName: "config-data") pod "cd9bf4e6-e6d1-4df4-ab06-5b7bc6f88473" (UID: "cd9bf4e6-e6d1-4df4-ab06-5b7bc6f88473"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:14:07 crc kubenswrapper[4921]: I1210 13:14:07.784153 4921 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cd9bf4e6-e6d1-4df4-ab06-5b7bc6f88473-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 13:14:07 crc kubenswrapper[4921]: I1210 13:14:07.784186 4921 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cd9bf4e6-e6d1-4df4-ab06-5b7bc6f88473-config-data\") on node \"crc\" DevicePath \"\"" Dec 10 13:14:07 crc kubenswrapper[4921]: I1210 13:14:07.784196 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qvntt\" (UniqueName: \"kubernetes.io/projected/cd9bf4e6-e6d1-4df4-ab06-5b7bc6f88473-kube-api-access-qvntt\") on node \"crc\" DevicePath \"\"" Dec 10 13:14:07 crc kubenswrapper[4921]: I1210 13:14:07.784210 4921 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd9bf4e6-e6d1-4df4-ab06-5b7bc6f88473-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 13:14:07 crc kubenswrapper[4921]: I1210 13:14:07.979607 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-fm45n" event={"ID":"cd9bf4e6-e6d1-4df4-ab06-5b7bc6f88473","Type":"ContainerDied","Data":"299459e3a855512a0e764a02673a29aaf940dd0776843a1f4ca9153da8c11ad7"} Dec 10 13:14:07 crc kubenswrapper[4921]: I1210 13:14:07.979648 4921 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="299459e3a855512a0e764a02673a29aaf940dd0776843a1f4ca9153da8c11ad7" Dec 10 13:14:07 crc kubenswrapper[4921]: I1210 13:14:07.979648 4921 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-fm45n" Dec 10 13:14:08 crc kubenswrapper[4921]: I1210 13:14:08.069077 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-5f895c947d-lxwdl"] Dec 10 13:14:08 crc kubenswrapper[4921]: E1210 13:14:08.069634 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f88eabcd-9eab-4760-b3b7-3d07479f164c" containerName="dnsmasq-dns" Dec 10 13:14:08 crc kubenswrapper[4921]: I1210 13:14:08.069739 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="f88eabcd-9eab-4760-b3b7-3d07479f164c" containerName="dnsmasq-dns" Dec 10 13:14:08 crc kubenswrapper[4921]: E1210 13:14:08.069806 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd9bf4e6-e6d1-4df4-ab06-5b7bc6f88473" containerName="placement-db-sync" Dec 10 13:14:08 crc kubenswrapper[4921]: I1210 13:14:08.069864 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd9bf4e6-e6d1-4df4-ab06-5b7bc6f88473" containerName="placement-db-sync" Dec 10 13:14:08 crc kubenswrapper[4921]: E1210 13:14:08.069927 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f88eabcd-9eab-4760-b3b7-3d07479f164c" containerName="init" Dec 10 13:14:08 crc kubenswrapper[4921]: I1210 13:14:08.069973 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="f88eabcd-9eab-4760-b3b7-3d07479f164c" containerName="init" Dec 10 13:14:08 crc kubenswrapper[4921]: I1210 13:14:08.070193 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="f88eabcd-9eab-4760-b3b7-3d07479f164c" containerName="dnsmasq-dns" Dec 10 13:14:08 crc kubenswrapper[4921]: I1210 13:14:08.070270 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="cd9bf4e6-e6d1-4df4-ab06-5b7bc6f88473" containerName="placement-db-sync" Dec 10 13:14:08 crc kubenswrapper[4921]: I1210 13:14:08.071111 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-5f895c947d-lxwdl" Dec 10 13:14:08 crc kubenswrapper[4921]: I1210 13:14:08.078466 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Dec 10 13:14:08 crc kubenswrapper[4921]: I1210 13:14:08.078776 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Dec 10 13:14:08 crc kubenswrapper[4921]: I1210 13:14:08.078930 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-kqhts" Dec 10 13:14:08 crc kubenswrapper[4921]: I1210 13:14:08.079036 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-public-svc" Dec 10 13:14:08 crc kubenswrapper[4921]: I1210 13:14:08.079265 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-internal-svc" Dec 10 13:14:08 crc kubenswrapper[4921]: I1210 13:14:08.088529 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-5f895c947d-lxwdl"] Dec 10 13:14:08 crc kubenswrapper[4921]: I1210 13:14:08.209022 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f8ebbb77-6cb6-44f7-86cd-42bf505379ae-config-data\") pod \"placement-5f895c947d-lxwdl\" (UID: \"f8ebbb77-6cb6-44f7-86cd-42bf505379ae\") " pod="openstack/placement-5f895c947d-lxwdl" Dec 10 13:14:08 crc kubenswrapper[4921]: I1210 13:14:08.209152 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2hv4v\" (UniqueName: \"kubernetes.io/projected/f8ebbb77-6cb6-44f7-86cd-42bf505379ae-kube-api-access-2hv4v\") pod \"placement-5f895c947d-lxwdl\" (UID: \"f8ebbb77-6cb6-44f7-86cd-42bf505379ae\") " pod="openstack/placement-5f895c947d-lxwdl" Dec 10 13:14:08 crc kubenswrapper[4921]: I1210 13:14:08.209189 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f8ebbb77-6cb6-44f7-86cd-42bf505379ae-public-tls-certs\") pod \"placement-5f895c947d-lxwdl\" (UID: \"f8ebbb77-6cb6-44f7-86cd-42bf505379ae\") " pod="openstack/placement-5f895c947d-lxwdl" Dec 10 13:14:08 crc kubenswrapper[4921]: I1210 13:14:08.209338 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f8ebbb77-6cb6-44f7-86cd-42bf505379ae-logs\") pod \"placement-5f895c947d-lxwdl\" (UID: \"f8ebbb77-6cb6-44f7-86cd-42bf505379ae\") " pod="openstack/placement-5f895c947d-lxwdl" Dec 10 13:14:08 crc kubenswrapper[4921]: I1210 13:14:08.209402 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f8ebbb77-6cb6-44f7-86cd-42bf505379ae-scripts\") pod \"placement-5f895c947d-lxwdl\" (UID: \"f8ebbb77-6cb6-44f7-86cd-42bf505379ae\") " pod="openstack/placement-5f895c947d-lxwdl" Dec 10 13:14:08 crc kubenswrapper[4921]: I1210 13:14:08.209942 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f8ebbb77-6cb6-44f7-86cd-42bf505379ae-combined-ca-bundle\") pod \"placement-5f895c947d-lxwdl\" (UID: \"f8ebbb77-6cb6-44f7-86cd-42bf505379ae\") " pod="openstack/placement-5f895c947d-lxwdl" Dec 10 13:14:08 crc kubenswrapper[4921]: I1210 13:14:08.210261 4921 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f8ebbb77-6cb6-44f7-86cd-42bf505379ae-internal-tls-certs\") pod \"placement-5f895c947d-lxwdl\" (UID: \"f8ebbb77-6cb6-44f7-86cd-42bf505379ae\") " pod="openstack/placement-5f895c947d-lxwdl" Dec 10 13:14:08 crc kubenswrapper[4921]: I1210 13:14:08.312499 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f8ebbb77-6cb6-44f7-86cd-42bf505379ae-internal-tls-certs\") pod \"placement-5f895c947d-lxwdl\" (UID: \"f8ebbb77-6cb6-44f7-86cd-42bf505379ae\") " pod="openstack/placement-5f895c947d-lxwdl" Dec 10 13:14:08 crc kubenswrapper[4921]: I1210 13:14:08.312709 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f8ebbb77-6cb6-44f7-86cd-42bf505379ae-config-data\") pod \"placement-5f895c947d-lxwdl\" (UID: \"f8ebbb77-6cb6-44f7-86cd-42bf505379ae\") " pod="openstack/placement-5f895c947d-lxwdl" Dec 10 13:14:08 crc kubenswrapper[4921]: I1210 13:14:08.312752 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2hv4v\" (UniqueName: \"kubernetes.io/projected/f8ebbb77-6cb6-44f7-86cd-42bf505379ae-kube-api-access-2hv4v\") pod \"placement-5f895c947d-lxwdl\" (UID: \"f8ebbb77-6cb6-44f7-86cd-42bf505379ae\") " pod="openstack/placement-5f895c947d-lxwdl" Dec 10 13:14:08 crc kubenswrapper[4921]: I1210 13:14:08.312777 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f8ebbb77-6cb6-44f7-86cd-42bf505379ae-public-tls-certs\") pod \"placement-5f895c947d-lxwdl\" (UID: \"f8ebbb77-6cb6-44f7-86cd-42bf505379ae\") " pod="openstack/placement-5f895c947d-lxwdl" Dec 10 13:14:08 crc kubenswrapper[4921]: I1210 13:14:08.312859 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f8ebbb77-6cb6-44f7-86cd-42bf505379ae-logs\") pod \"placement-5f895c947d-lxwdl\" (UID: \"f8ebbb77-6cb6-44f7-86cd-42bf505379ae\") " pod="openstack/placement-5f895c947d-lxwdl" Dec 10 13:14:08 crc kubenswrapper[4921]: I1210 13:14:08.312924 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f8ebbb77-6cb6-44f7-86cd-42bf505379ae-scripts\") pod \"placement-5f895c947d-lxwdl\" (UID: \"f8ebbb77-6cb6-44f7-86cd-42bf505379ae\") " pod="openstack/placement-5f895c947d-lxwdl" Dec 10 13:14:08 crc kubenswrapper[4921]: I1210 13:14:08.312969 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f8ebbb77-6cb6-44f7-86cd-42bf505379ae-combined-ca-bundle\") pod \"placement-5f895c947d-lxwdl\" (UID: \"f8ebbb77-6cb6-44f7-86cd-42bf505379ae\") " pod="openstack/placement-5f895c947d-lxwdl" Dec 10 13:14:08 crc kubenswrapper[4921]: I1210 13:14:08.314519 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f8ebbb77-6cb6-44f7-86cd-42bf505379ae-logs\") pod \"placement-5f895c947d-lxwdl\" (UID: \"f8ebbb77-6cb6-44f7-86cd-42bf505379ae\") " pod="openstack/placement-5f895c947d-lxwdl" Dec 10 13:14:08 crc kubenswrapper[4921]: I1210 13:14:08.321075 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/f8ebbb77-6cb6-44f7-86cd-42bf505379ae-public-tls-certs\") pod \"placement-5f895c947d-lxwdl\" (UID: \"f8ebbb77-6cb6-44f7-86cd-42bf505379ae\") " pod="openstack/placement-5f895c947d-lxwdl" Dec 10 13:14:08 crc kubenswrapper[4921]: I1210 13:14:08.330413 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f8ebbb77-6cb6-44f7-86cd-42bf505379ae-config-data\") pod \"placement-5f895c947d-lxwdl\" (UID: \"f8ebbb77-6cb6-44f7-86cd-42bf505379ae\") " pod="openstack/placement-5f895c947d-lxwdl" Dec 10 13:14:08 crc kubenswrapper[4921]: I1210 13:14:08.331503 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f8ebbb77-6cb6-44f7-86cd-42bf505379ae-internal-tls-certs\") pod \"placement-5f895c947d-lxwdl\" (UID: \"f8ebbb77-6cb6-44f7-86cd-42bf505379ae\") " pod="openstack/placement-5f895c947d-lxwdl" Dec 10 13:14:08 crc kubenswrapper[4921]: I1210 13:14:08.331633 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f8ebbb77-6cb6-44f7-86cd-42bf505379ae-combined-ca-bundle\") pod \"placement-5f895c947d-lxwdl\" (UID: \"f8ebbb77-6cb6-44f7-86cd-42bf505379ae\") " pod="openstack/placement-5f895c947d-lxwdl" Dec 10 13:14:08 crc kubenswrapper[4921]: I1210 13:14:08.331913 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f8ebbb77-6cb6-44f7-86cd-42bf505379ae-scripts\") pod \"placement-5f895c947d-lxwdl\" (UID: \"f8ebbb77-6cb6-44f7-86cd-42bf505379ae\") " pod="openstack/placement-5f895c947d-lxwdl" Dec 10 13:14:08 crc kubenswrapper[4921]: I1210 13:14:08.341462 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2hv4v\" (UniqueName: \"kubernetes.io/projected/f8ebbb77-6cb6-44f7-86cd-42bf505379ae-kube-api-access-2hv4v\") pod \"placement-5f895c947d-lxwdl\" (UID: \"f8ebbb77-6cb6-44f7-86cd-42bf505379ae\") " pod="openstack/placement-5f895c947d-lxwdl" Dec 10 13:14:08 crc kubenswrapper[4921]: I1210 13:14:08.397354 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-5f895c947d-lxwdl" Dec 10 13:14:15 crc kubenswrapper[4921]: I1210 13:14:15.052771 4921 generic.go:334] "Generic (PLEG): container finished" podID="f1e88d7b-1b52-4f84-9648-61b3fc78a4f5" containerID="81fe7a13737ffb33b1b7cc13699d92e3ae5722db7489dceacb430555d0947771" exitCode=0 Dec 10 13:14:15 crc kubenswrapper[4921]: I1210 13:14:15.052842 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-w9cf8" event={"ID":"f1e88d7b-1b52-4f84-9648-61b3fc78a4f5","Type":"ContainerDied","Data":"81fe7a13737ffb33b1b7cc13699d92e3ae5722db7489dceacb430555d0947771"} Dec 10 13:14:19 crc kubenswrapper[4921]: I1210 13:14:19.254827 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-2ztsg" Dec 10 13:14:19 crc kubenswrapper[4921]: I1210 13:14:19.261601 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-mlljr" Dec 10 13:14:19 crc kubenswrapper[4921]: I1210 13:14:19.266006 4921 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-w9cf8" Dec 10 13:14:19 crc kubenswrapper[4921]: I1210 13:14:19.397266 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7efd750c-824b-443b-a0e3-dc57a14e928c-combined-ca-bundle\") pod \"7efd750c-824b-443b-a0e3-dc57a14e928c\" (UID: \"7efd750c-824b-443b-a0e3-dc57a14e928c\") " Dec 10 13:14:19 crc kubenswrapper[4921]: I1210 13:14:19.397629 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/f1e88d7b-1b52-4f84-9648-61b3fc78a4f5-config\") pod \"f1e88d7b-1b52-4f84-9648-61b3fc78a4f5\" (UID: \"f1e88d7b-1b52-4f84-9648-61b3fc78a4f5\") " Dec 10 13:14:19 crc kubenswrapper[4921]: I1210 13:14:19.397711 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/99b2821d-2229-4c9e-8b01-699b20d6d65e-combined-ca-bundle\") pod \"99b2821d-2229-4c9e-8b01-699b20d6d65e\" (UID: \"99b2821d-2229-4c9e-8b01-699b20d6d65e\") " Dec 10 13:14:19 crc kubenswrapper[4921]: I1210 13:14:19.397757 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/7efd750c-824b-443b-a0e3-dc57a14e928c-db-sync-config-data\") pod \"7efd750c-824b-443b-a0e3-dc57a14e928c\" (UID: \"7efd750c-824b-443b-a0e3-dc57a14e928c\") " Dec 10 13:14:19 crc kubenswrapper[4921]: I1210 13:14:19.397797 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/99b2821d-2229-4c9e-8b01-699b20d6d65e-credential-keys\") pod \"99b2821d-2229-4c9e-8b01-699b20d6d65e\" (UID: \"99b2821d-2229-4c9e-8b01-699b20d6d65e\") " Dec 10 13:14:19 crc kubenswrapper[4921]: I1210 13:14:19.397848 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j6d5n\" (UniqueName: \"kubernetes.io/projected/f1e88d7b-1b52-4f84-9648-61b3fc78a4f5-kube-api-access-j6d5n\") pod \"f1e88d7b-1b52-4f84-9648-61b3fc78a4f5\" (UID: \"f1e88d7b-1b52-4f84-9648-61b3fc78a4f5\") " Dec 10 13:14:19 crc kubenswrapper[4921]: I1210 13:14:19.397878 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f1e88d7b-1b52-4f84-9648-61b3fc78a4f5-combined-ca-bundle\") pod \"f1e88d7b-1b52-4f84-9648-61b3fc78a4f5\" (UID: \"f1e88d7b-1b52-4f84-9648-61b3fc78a4f5\") " Dec 10 13:14:19 crc kubenswrapper[4921]: I1210 13:14:19.397901 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6w4bl\" (UniqueName: \"kubernetes.io/projected/99b2821d-2229-4c9e-8b01-699b20d6d65e-kube-api-access-6w4bl\") pod \"99b2821d-2229-4c9e-8b01-699b20d6d65e\" (UID: \"99b2821d-2229-4c9e-8b01-699b20d6d65e\") " Dec 10 13:14:19 crc kubenswrapper[4921]: I1210 13:14:19.397963 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zxkbg\" (UniqueName: \"kubernetes.io/projected/7efd750c-824b-443b-a0e3-dc57a14e928c-kube-api-access-zxkbg\") pod \"7efd750c-824b-443b-a0e3-dc57a14e928c\" (UID: \"7efd750c-824b-443b-a0e3-dc57a14e928c\") " Dec 10 13:14:19 crc kubenswrapper[4921]: I1210 13:14:19.397997 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/99b2821d-2229-4c9e-8b01-699b20d6d65e-config-data\") pod 
\"99b2821d-2229-4c9e-8b01-699b20d6d65e\" (UID: \"99b2821d-2229-4c9e-8b01-699b20d6d65e\") " Dec 10 13:14:19 crc kubenswrapper[4921]: I1210 13:14:19.398020 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/99b2821d-2229-4c9e-8b01-699b20d6d65e-fernet-keys\") pod \"99b2821d-2229-4c9e-8b01-699b20d6d65e\" (UID: \"99b2821d-2229-4c9e-8b01-699b20d6d65e\") " Dec 10 13:14:19 crc kubenswrapper[4921]: I1210 13:14:19.398066 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/99b2821d-2229-4c9e-8b01-699b20d6d65e-scripts\") pod \"99b2821d-2229-4c9e-8b01-699b20d6d65e\" (UID: \"99b2821d-2229-4c9e-8b01-699b20d6d65e\") " Dec 10 13:14:19 crc kubenswrapper[4921]: I1210 13:14:19.403811 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f1e88d7b-1b52-4f84-9648-61b3fc78a4f5-kube-api-access-j6d5n" (OuterVolumeSpecName: "kube-api-access-j6d5n") pod "f1e88d7b-1b52-4f84-9648-61b3fc78a4f5" (UID: "f1e88d7b-1b52-4f84-9648-61b3fc78a4f5"). InnerVolumeSpecName "kube-api-access-j6d5n". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 13:14:19 crc kubenswrapper[4921]: I1210 13:14:19.404532 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/99b2821d-2229-4c9e-8b01-699b20d6d65e-scripts" (OuterVolumeSpecName: "scripts") pod "99b2821d-2229-4c9e-8b01-699b20d6d65e" (UID: "99b2821d-2229-4c9e-8b01-699b20d6d65e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:14:19 crc kubenswrapper[4921]: I1210 13:14:19.405076 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7efd750c-824b-443b-a0e3-dc57a14e928c-kube-api-access-zxkbg" (OuterVolumeSpecName: "kube-api-access-zxkbg") pod "7efd750c-824b-443b-a0e3-dc57a14e928c" (UID: "7efd750c-824b-443b-a0e3-dc57a14e928c"). InnerVolumeSpecName "kube-api-access-zxkbg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 13:14:19 crc kubenswrapper[4921]: I1210 13:14:19.407294 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/99b2821d-2229-4c9e-8b01-699b20d6d65e-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "99b2821d-2229-4c9e-8b01-699b20d6d65e" (UID: "99b2821d-2229-4c9e-8b01-699b20d6d65e"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:14:19 crc kubenswrapper[4921]: I1210 13:14:19.408602 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7efd750c-824b-443b-a0e3-dc57a14e928c-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "7efd750c-824b-443b-a0e3-dc57a14e928c" (UID: "7efd750c-824b-443b-a0e3-dc57a14e928c"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:14:19 crc kubenswrapper[4921]: I1210 13:14:19.420720 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/99b2821d-2229-4c9e-8b01-699b20d6d65e-kube-api-access-6w4bl" (OuterVolumeSpecName: "kube-api-access-6w4bl") pod "99b2821d-2229-4c9e-8b01-699b20d6d65e" (UID: "99b2821d-2229-4c9e-8b01-699b20d6d65e"). InnerVolumeSpecName "kube-api-access-6w4bl". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 13:14:19 crc kubenswrapper[4921]: I1210 13:14:19.426742 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/99b2821d-2229-4c9e-8b01-699b20d6d65e-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "99b2821d-2229-4c9e-8b01-699b20d6d65e" (UID: "99b2821d-2229-4c9e-8b01-699b20d6d65e"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:14:19 crc kubenswrapper[4921]: I1210 13:14:19.429603 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7efd750c-824b-443b-a0e3-dc57a14e928c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7efd750c-824b-443b-a0e3-dc57a14e928c" (UID: "7efd750c-824b-443b-a0e3-dc57a14e928c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:14:19 crc kubenswrapper[4921]: I1210 13:14:19.429843 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/99b2821d-2229-4c9e-8b01-699b20d6d65e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "99b2821d-2229-4c9e-8b01-699b20d6d65e" (UID: "99b2821d-2229-4c9e-8b01-699b20d6d65e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:14:19 crc kubenswrapper[4921]: I1210 13:14:19.431157 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f1e88d7b-1b52-4f84-9648-61b3fc78a4f5-config" (OuterVolumeSpecName: "config") pod "f1e88d7b-1b52-4f84-9648-61b3fc78a4f5" (UID: "f1e88d7b-1b52-4f84-9648-61b3fc78a4f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:14:19 crc kubenswrapper[4921]: I1210 13:14:19.440704 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/99b2821d-2229-4c9e-8b01-699b20d6d65e-config-data" (OuterVolumeSpecName: "config-data") pod "99b2821d-2229-4c9e-8b01-699b20d6d65e" (UID: "99b2821d-2229-4c9e-8b01-699b20d6d65e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:14:19 crc kubenswrapper[4921]: I1210 13:14:19.444012 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f1e88d7b-1b52-4f84-9648-61b3fc78a4f5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f1e88d7b-1b52-4f84-9648-61b3fc78a4f5" (UID: "f1e88d7b-1b52-4f84-9648-61b3fc78a4f5"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:14:19 crc kubenswrapper[4921]: I1210 13:14:19.499818 4921 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/99b2821d-2229-4c9e-8b01-699b20d6d65e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 13:14:19 crc kubenswrapper[4921]: I1210 13:14:19.499856 4921 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/7efd750c-824b-443b-a0e3-dc57a14e928c-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 10 13:14:19 crc kubenswrapper[4921]: I1210 13:14:19.499868 4921 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/99b2821d-2229-4c9e-8b01-699b20d6d65e-credential-keys\") on node \"crc\" DevicePath \"\"" Dec 10 13:14:19 crc kubenswrapper[4921]: I1210 13:14:19.499947 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j6d5n\" (UniqueName: \"kubernetes.io/projected/f1e88d7b-1b52-4f84-9648-61b3fc78a4f5-kube-api-access-j6d5n\") on node \"crc\" DevicePath \"\"" Dec 10 13:14:19 crc kubenswrapper[4921]: I1210 13:14:19.499963 4921 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f1e88d7b-1b52-4f84-9648-61b3fc78a4f5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 13:14:19 crc kubenswrapper[4921]: I1210 13:14:19.499974 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6w4bl\" (UniqueName: \"kubernetes.io/projected/99b2821d-2229-4c9e-8b01-699b20d6d65e-kube-api-access-6w4bl\") on node \"crc\" DevicePath \"\"" Dec 10 13:14:19 crc kubenswrapper[4921]: I1210 13:14:19.499999 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zxkbg\" (UniqueName: \"kubernetes.io/projected/7efd750c-824b-443b-a0e3-dc57a14e928c-kube-api-access-zxkbg\") on node \"crc\" DevicePath \"\"" Dec 10 13:14:19 crc kubenswrapper[4921]: I1210 13:14:19.500010 4921 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/99b2821d-2229-4c9e-8b01-699b20d6d65e-config-data\") on node \"crc\" DevicePath \"\"" Dec 10 13:14:19 crc kubenswrapper[4921]: I1210 13:14:19.500020 4921 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/99b2821d-2229-4c9e-8b01-699b20d6d65e-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 10 13:14:19 crc kubenswrapper[4921]: I1210 13:14:19.500031 4921 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/99b2821d-2229-4c9e-8b01-699b20d6d65e-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 13:14:19 crc kubenswrapper[4921]: I1210 13:14:19.500043 4921 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7efd750c-824b-443b-a0e3-dc57a14e928c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 13:14:19 crc kubenswrapper[4921]: I1210 13:14:19.500052 4921 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/f1e88d7b-1b52-4f84-9648-61b3fc78a4f5-config\") on node \"crc\" DevicePath \"\"" Dec 10 13:14:20 crc kubenswrapper[4921]: I1210 13:14:20.117215 4921 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-mlljr" Dec 10 13:14:20 crc kubenswrapper[4921]: I1210 13:14:20.117211 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-mlljr" event={"ID":"7efd750c-824b-443b-a0e3-dc57a14e928c","Type":"ContainerDied","Data":"37db970ba62a8576d01ccde10ff9a48023d7734a78034bd09ff6453fd7a24bc3"} Dec 10 13:14:20 crc kubenswrapper[4921]: I1210 13:14:20.117786 4921 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="37db970ba62a8576d01ccde10ff9a48023d7734a78034bd09ff6453fd7a24bc3" Dec 10 13:14:20 crc kubenswrapper[4921]: I1210 13:14:20.119875 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-w9cf8" event={"ID":"f1e88d7b-1b52-4f84-9648-61b3fc78a4f5","Type":"ContainerDied","Data":"a4cfbdc4c93d06347a8c14e4d76e3513eeace3120ffc9e4863f063e2fe08f6a6"} Dec 10 13:14:20 crc kubenswrapper[4921]: I1210 13:14:20.119918 4921 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a4cfbdc4c93d06347a8c14e4d76e3513eeace3120ffc9e4863f063e2fe08f6a6" Dec 10 13:14:20 crc kubenswrapper[4921]: I1210 13:14:20.119914 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-w9cf8" Dec 10 13:14:20 crc kubenswrapper[4921]: I1210 13:14:20.122271 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-2ztsg" event={"ID":"99b2821d-2229-4c9e-8b01-699b20d6d65e","Type":"ContainerDied","Data":"0be63ddb4542a8a8141ecc554d28848e39b1f03cedb7b6055e3bca505b80b147"} Dec 10 13:14:20 crc kubenswrapper[4921]: I1210 13:14:20.122330 4921 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0be63ddb4542a8a8141ecc554d28848e39b1f03cedb7b6055e3bca505b80b147" Dec 10 13:14:20 crc kubenswrapper[4921]: I1210 13:14:20.122568 4921 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-2ztsg" Dec 10 13:14:20 crc kubenswrapper[4921]: I1210 13:14:20.361216 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-565cd4b5db-xfcb6"] Dec 10 13:14:20 crc kubenswrapper[4921]: E1210 13:14:20.361575 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f1e88d7b-1b52-4f84-9648-61b3fc78a4f5" containerName="neutron-db-sync" Dec 10 13:14:20 crc kubenswrapper[4921]: I1210 13:14:20.361587 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="f1e88d7b-1b52-4f84-9648-61b3fc78a4f5" containerName="neutron-db-sync" Dec 10 13:14:20 crc kubenswrapper[4921]: E1210 13:14:20.361604 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7efd750c-824b-443b-a0e3-dc57a14e928c" containerName="barbican-db-sync" Dec 10 13:14:20 crc kubenswrapper[4921]: I1210 13:14:20.361610 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="7efd750c-824b-443b-a0e3-dc57a14e928c" containerName="barbican-db-sync" Dec 10 13:14:20 crc kubenswrapper[4921]: E1210 13:14:20.361630 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="99b2821d-2229-4c9e-8b01-699b20d6d65e" containerName="keystone-bootstrap" Dec 10 13:14:20 crc kubenswrapper[4921]: I1210 13:14:20.361636 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="99b2821d-2229-4c9e-8b01-699b20d6d65e" containerName="keystone-bootstrap" Dec 10 13:14:20 crc kubenswrapper[4921]: I1210 13:14:20.361771 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="99b2821d-2229-4c9e-8b01-699b20d6d65e" containerName="keystone-bootstrap" Dec 10 13:14:20 crc kubenswrapper[4921]: I1210 13:14:20.361790 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="7efd750c-824b-443b-a0e3-dc57a14e928c" containerName="barbican-db-sync" Dec 10 13:14:20 crc kubenswrapper[4921]: I1210 13:14:20.361801 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="f1e88d7b-1b52-4f84-9648-61b3fc78a4f5" containerName="neutron-db-sync" Dec 10 13:14:20 crc kubenswrapper[4921]: I1210 13:14:20.362350 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-565cd4b5db-xfcb6" Dec 10 13:14:20 crc kubenswrapper[4921]: I1210 13:14:20.369504 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 10 13:14:20 crc kubenswrapper[4921]: I1210 13:14:20.369792 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 10 13:14:20 crc kubenswrapper[4921]: I1210 13:14:20.369919 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 10 13:14:20 crc kubenswrapper[4921]: I1210 13:14:20.370077 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc" Dec 10 13:14:20 crc kubenswrapper[4921]: I1210 13:14:20.370194 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-t9xv5" Dec 10 13:14:20 crc kubenswrapper[4921]: I1210 13:14:20.370299 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc" Dec 10 13:14:20 crc kubenswrapper[4921]: I1210 13:14:20.394364 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-565cd4b5db-xfcb6"] Dec 10 13:14:20 crc kubenswrapper[4921]: I1210 13:14:20.531614 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/2d4b5545-ad15-4ed1-b655-61fa51deb3d7-fernet-keys\") pod \"keystone-565cd4b5db-xfcb6\" (UID: \"2d4b5545-ad15-4ed1-b655-61fa51deb3d7\") " pod="openstack/keystone-565cd4b5db-xfcb6" Dec 10 13:14:20 crc kubenswrapper[4921]: I1210 13:14:20.531969 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2d4b5545-ad15-4ed1-b655-61fa51deb3d7-scripts\") pod \"keystone-565cd4b5db-xfcb6\" (UID: \"2d4b5545-ad15-4ed1-b655-61fa51deb3d7\") " pod="openstack/keystone-565cd4b5db-xfcb6" Dec 10 13:14:20 crc kubenswrapper[4921]: I1210 13:14:20.532016 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-knxsp\" (UniqueName: \"kubernetes.io/projected/2d4b5545-ad15-4ed1-b655-61fa51deb3d7-kube-api-access-knxsp\") pod \"keystone-565cd4b5db-xfcb6\" (UID: \"2d4b5545-ad15-4ed1-b655-61fa51deb3d7\") " pod="openstack/keystone-565cd4b5db-xfcb6" Dec 10 13:14:20 crc kubenswrapper[4921]: I1210 13:14:20.532056 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/2d4b5545-ad15-4ed1-b655-61fa51deb3d7-credential-keys\") pod \"keystone-565cd4b5db-xfcb6\" (UID: \"2d4b5545-ad15-4ed1-b655-61fa51deb3d7\") " pod="openstack/keystone-565cd4b5db-xfcb6" Dec 10 13:14:20 crc kubenswrapper[4921]: I1210 13:14:20.532107 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2d4b5545-ad15-4ed1-b655-61fa51deb3d7-public-tls-certs\") pod \"keystone-565cd4b5db-xfcb6\" (UID: \"2d4b5545-ad15-4ed1-b655-61fa51deb3d7\") " pod="openstack/keystone-565cd4b5db-xfcb6" Dec 10 13:14:20 crc kubenswrapper[4921]: I1210 13:14:20.532142 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d4b5545-ad15-4ed1-b655-61fa51deb3d7-combined-ca-bundle\") pod \"keystone-565cd4b5db-xfcb6\" (UID: 
\"2d4b5545-ad15-4ed1-b655-61fa51deb3d7\") " pod="openstack/keystone-565cd4b5db-xfcb6" Dec 10 13:14:20 crc kubenswrapper[4921]: I1210 13:14:20.532175 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2d4b5545-ad15-4ed1-b655-61fa51deb3d7-config-data\") pod \"keystone-565cd4b5db-xfcb6\" (UID: \"2d4b5545-ad15-4ed1-b655-61fa51deb3d7\") " pod="openstack/keystone-565cd4b5db-xfcb6" Dec 10 13:14:20 crc kubenswrapper[4921]: I1210 13:14:20.532199 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2d4b5545-ad15-4ed1-b655-61fa51deb3d7-internal-tls-certs\") pod \"keystone-565cd4b5db-xfcb6\" (UID: \"2d4b5545-ad15-4ed1-b655-61fa51deb3d7\") " pod="openstack/keystone-565cd4b5db-xfcb6" Dec 10 13:14:20 crc kubenswrapper[4921]: E1210 13:14:20.542721 4921 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified" Dec 10 13:14:20 crc kubenswrapper[4921]: E1210 13:14:20.542866 4921 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cinder-db-sync,Image:quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-machine-id,ReadOnly:true,MountPath:/etc/machine-id,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/merged,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/cinder/cinder.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-swfrk,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,Volume
Devices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-db-sync-nqvc7_openstack(049815fe-e8f8-45c6-9360-d2d331fa8cd3): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 10 13:14:20 crc kubenswrapper[4921]: E1210 13:14:20.545555 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cinder-db-sync-nqvc7" podUID="049815fe-e8f8-45c6-9360-d2d331fa8cd3" Dec 10 13:14:20 crc kubenswrapper[4921]: I1210 13:14:20.615749 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-5db748cdcf-qnswn"] Dec 10 13:14:20 crc kubenswrapper[4921]: I1210 13:14:20.620597 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-5db748cdcf-qnswn" Dec 10 13:14:20 crc kubenswrapper[4921]: I1210 13:14:20.625236 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Dec 10 13:14:20 crc kubenswrapper[4921]: I1210 13:14:20.625452 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data" Dec 10 13:14:20 crc kubenswrapper[4921]: I1210 13:14:20.625559 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-cbw5c" Dec 10 13:14:20 crc kubenswrapper[4921]: I1210 13:14:20.639177 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/2d4b5545-ad15-4ed1-b655-61fa51deb3d7-fernet-keys\") pod \"keystone-565cd4b5db-xfcb6\" (UID: \"2d4b5545-ad15-4ed1-b655-61fa51deb3d7\") " pod="openstack/keystone-565cd4b5db-xfcb6" Dec 10 13:14:20 crc kubenswrapper[4921]: I1210 13:14:20.639215 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2d4b5545-ad15-4ed1-b655-61fa51deb3d7-scripts\") pod \"keystone-565cd4b5db-xfcb6\" (UID: \"2d4b5545-ad15-4ed1-b655-61fa51deb3d7\") " pod="openstack/keystone-565cd4b5db-xfcb6" Dec 10 13:14:20 crc kubenswrapper[4921]: I1210 13:14:20.639241 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-knxsp\" (UniqueName: \"kubernetes.io/projected/2d4b5545-ad15-4ed1-b655-61fa51deb3d7-kube-api-access-knxsp\") pod \"keystone-565cd4b5db-xfcb6\" (UID: \"2d4b5545-ad15-4ed1-b655-61fa51deb3d7\") " pod="openstack/keystone-565cd4b5db-xfcb6" Dec 10 13:14:20 crc kubenswrapper[4921]: I1210 13:14:20.639275 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/2d4b5545-ad15-4ed1-b655-61fa51deb3d7-credential-keys\") pod \"keystone-565cd4b5db-xfcb6\" (UID: \"2d4b5545-ad15-4ed1-b655-61fa51deb3d7\") " pod="openstack/keystone-565cd4b5db-xfcb6" Dec 10 13:14:20 crc kubenswrapper[4921]: I1210 13:14:20.639301 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2d4b5545-ad15-4ed1-b655-61fa51deb3d7-public-tls-certs\") pod \"keystone-565cd4b5db-xfcb6\" (UID: \"2d4b5545-ad15-4ed1-b655-61fa51deb3d7\") " pod="openstack/keystone-565cd4b5db-xfcb6" Dec 10 13:14:20 crc kubenswrapper[4921]: I1210 13:14:20.639333 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d4b5545-ad15-4ed1-b655-61fa51deb3d7-combined-ca-bundle\") pod \"keystone-565cd4b5db-xfcb6\" (UID: \"2d4b5545-ad15-4ed1-b655-61fa51deb3d7\") " pod="openstack/keystone-565cd4b5db-xfcb6" Dec 10 13:14:20 crc kubenswrapper[4921]: I1210 13:14:20.639350 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2d4b5545-ad15-4ed1-b655-61fa51deb3d7-config-data\") pod \"keystone-565cd4b5db-xfcb6\" (UID: \"2d4b5545-ad15-4ed1-b655-61fa51deb3d7\") " pod="openstack/keystone-565cd4b5db-xfcb6" Dec 10 13:14:20 crc kubenswrapper[4921]: I1210 13:14:20.639372 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2d4b5545-ad15-4ed1-b655-61fa51deb3d7-internal-tls-certs\") pod \"keystone-565cd4b5db-xfcb6\" (UID: \"2d4b5545-ad15-4ed1-b655-61fa51deb3d7\") " pod="openstack/keystone-565cd4b5db-xfcb6" Dec 10 13:14:20 crc kubenswrapper[4921]: I1210 13:14:20.665370 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2d4b5545-ad15-4ed1-b655-61fa51deb3d7-internal-tls-certs\") pod \"keystone-565cd4b5db-xfcb6\" (UID: \"2d4b5545-ad15-4ed1-b655-61fa51deb3d7\") " pod="openstack/keystone-565cd4b5db-xfcb6" Dec 10 13:14:20 crc kubenswrapper[4921]: I1210 13:14:20.666356 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2d4b5545-ad15-4ed1-b655-61fa51deb3d7-scripts\") pod \"keystone-565cd4b5db-xfcb6\" (UID: \"2d4b5545-ad15-4ed1-b655-61fa51deb3d7\") " pod="openstack/keystone-565cd4b5db-xfcb6" Dec 10 13:14:20 crc kubenswrapper[4921]: I1210 13:14:20.667005 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/2d4b5545-ad15-4ed1-b655-61fa51deb3d7-credential-keys\") pod \"keystone-565cd4b5db-xfcb6\" (UID: \"2d4b5545-ad15-4ed1-b655-61fa51deb3d7\") " pod="openstack/keystone-565cd4b5db-xfcb6" Dec 10 13:14:20 crc kubenswrapper[4921]: I1210 13:14:20.670218 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d4b5545-ad15-4ed1-b655-61fa51deb3d7-combined-ca-bundle\") pod \"keystone-565cd4b5db-xfcb6\" (UID: \"2d4b5545-ad15-4ed1-b655-61fa51deb3d7\") " pod="openstack/keystone-565cd4b5db-xfcb6" Dec 10 13:14:20 crc kubenswrapper[4921]: I1210 13:14:20.684251 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2d4b5545-ad15-4ed1-b655-61fa51deb3d7-public-tls-certs\") pod \"keystone-565cd4b5db-xfcb6\" (UID: \"2d4b5545-ad15-4ed1-b655-61fa51deb3d7\") " pod="openstack/keystone-565cd4b5db-xfcb6" Dec 10 13:14:20 crc kubenswrapper[4921]: I1210 13:14:20.684859 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2d4b5545-ad15-4ed1-b655-61fa51deb3d7-config-data\") pod \"keystone-565cd4b5db-xfcb6\" (UID: \"2d4b5545-ad15-4ed1-b655-61fa51deb3d7\") " pod="openstack/keystone-565cd4b5db-xfcb6" Dec 10 13:14:20 crc kubenswrapper[4921]: I1210 13:14:20.703288 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-5db748cdcf-qnswn"] Dec 10 13:14:20 crc kubenswrapper[4921]: I1210 13:14:20.727016 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"fernet-keys\" (UniqueName: \"kubernetes.io/secret/2d4b5545-ad15-4ed1-b655-61fa51deb3d7-fernet-keys\") pod \"keystone-565cd4b5db-xfcb6\" (UID: \"2d4b5545-ad15-4ed1-b655-61fa51deb3d7\") " pod="openstack/keystone-565cd4b5db-xfcb6" Dec 10 13:14:20 crc kubenswrapper[4921]: I1210 13:14:20.741495 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1e30b467-3355-49fe-862d-9a79cdf0a35b-config-data-custom\") pod \"barbican-worker-5db748cdcf-qnswn\" (UID: \"1e30b467-3355-49fe-862d-9a79cdf0a35b\") " pod="openstack/barbican-worker-5db748cdcf-qnswn" Dec 10 13:14:20 crc kubenswrapper[4921]: I1210 13:14:20.741895 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1e30b467-3355-49fe-862d-9a79cdf0a35b-config-data\") pod \"barbican-worker-5db748cdcf-qnswn\" (UID: \"1e30b467-3355-49fe-862d-9a79cdf0a35b\") " pod="openstack/barbican-worker-5db748cdcf-qnswn" Dec 10 13:14:20 crc kubenswrapper[4921]: I1210 13:14:20.742018 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cnrxt\" (UniqueName: \"kubernetes.io/projected/1e30b467-3355-49fe-862d-9a79cdf0a35b-kube-api-access-cnrxt\") pod \"barbican-worker-5db748cdcf-qnswn\" (UID: \"1e30b467-3355-49fe-862d-9a79cdf0a35b\") " pod="openstack/barbican-worker-5db748cdcf-qnswn" Dec 10 13:14:20 crc kubenswrapper[4921]: I1210 13:14:20.742070 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1e30b467-3355-49fe-862d-9a79cdf0a35b-logs\") pod \"barbican-worker-5db748cdcf-qnswn\" (UID: \"1e30b467-3355-49fe-862d-9a79cdf0a35b\") " pod="openstack/barbican-worker-5db748cdcf-qnswn" Dec 10 13:14:20 crc kubenswrapper[4921]: I1210 13:14:20.742118 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1e30b467-3355-49fe-862d-9a79cdf0a35b-combined-ca-bundle\") pod \"barbican-worker-5db748cdcf-qnswn\" (UID: \"1e30b467-3355-49fe-862d-9a79cdf0a35b\") " pod="openstack/barbican-worker-5db748cdcf-qnswn" Dec 10 13:14:20 crc kubenswrapper[4921]: I1210 13:14:20.770095 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-knxsp\" (UniqueName: \"kubernetes.io/projected/2d4b5545-ad15-4ed1-b655-61fa51deb3d7-kube-api-access-knxsp\") pod \"keystone-565cd4b5db-xfcb6\" (UID: \"2d4b5545-ad15-4ed1-b655-61fa51deb3d7\") " pod="openstack/keystone-565cd4b5db-xfcb6" Dec 10 13:14:20 crc kubenswrapper[4921]: I1210 13:14:20.801557 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-5fdffd7d58-bx6fh"] Dec 10 13:14:20 crc kubenswrapper[4921]: I1210 13:14:20.812358 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-keystone-listener-5fdffd7d58-bx6fh" Dec 10 13:14:20 crc kubenswrapper[4921]: I1210 13:14:20.815674 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data" Dec 10 13:14:20 crc kubenswrapper[4921]: I1210 13:14:20.900350 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1e30b467-3355-49fe-862d-9a79cdf0a35b-config-data-custom\") pod \"barbican-worker-5db748cdcf-qnswn\" (UID: \"1e30b467-3355-49fe-862d-9a79cdf0a35b\") " pod="openstack/barbican-worker-5db748cdcf-qnswn" Dec 10 13:14:20 crc kubenswrapper[4921]: I1210 13:14:20.900417 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1e30b467-3355-49fe-862d-9a79cdf0a35b-config-data\") pod \"barbican-worker-5db748cdcf-qnswn\" (UID: \"1e30b467-3355-49fe-862d-9a79cdf0a35b\") " pod="openstack/barbican-worker-5db748cdcf-qnswn" Dec 10 13:14:20 crc kubenswrapper[4921]: I1210 13:14:20.900449 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6bad2725-64db-4468-b2c2-a1454f632ed9-config-data-custom\") pod \"barbican-keystone-listener-5fdffd7d58-bx6fh\" (UID: \"6bad2725-64db-4468-b2c2-a1454f632ed9\") " pod="openstack/barbican-keystone-listener-5fdffd7d58-bx6fh" Dec 10 13:14:20 crc kubenswrapper[4921]: I1210 13:14:20.900478 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6bad2725-64db-4468-b2c2-a1454f632ed9-logs\") pod \"barbican-keystone-listener-5fdffd7d58-bx6fh\" (UID: \"6bad2725-64db-4468-b2c2-a1454f632ed9\") " pod="openstack/barbican-keystone-listener-5fdffd7d58-bx6fh" Dec 10 13:14:20 crc kubenswrapper[4921]: I1210 13:14:20.900517 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6bad2725-64db-4468-b2c2-a1454f632ed9-combined-ca-bundle\") pod \"barbican-keystone-listener-5fdffd7d58-bx6fh\" (UID: \"6bad2725-64db-4468-b2c2-a1454f632ed9\") " pod="openstack/barbican-keystone-listener-5fdffd7d58-bx6fh" Dec 10 13:14:20 crc kubenswrapper[4921]: I1210 13:14:20.900551 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6bad2725-64db-4468-b2c2-a1454f632ed9-config-data\") pod \"barbican-keystone-listener-5fdffd7d58-bx6fh\" (UID: \"6bad2725-64db-4468-b2c2-a1454f632ed9\") " pod="openstack/barbican-keystone-listener-5fdffd7d58-bx6fh" Dec 10 13:14:20 crc kubenswrapper[4921]: I1210 13:14:20.900576 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cnrxt\" (UniqueName: \"kubernetes.io/projected/1e30b467-3355-49fe-862d-9a79cdf0a35b-kube-api-access-cnrxt\") pod \"barbican-worker-5db748cdcf-qnswn\" (UID: \"1e30b467-3355-49fe-862d-9a79cdf0a35b\") " pod="openstack/barbican-worker-5db748cdcf-qnswn" Dec 10 13:14:20 crc kubenswrapper[4921]: I1210 13:14:20.900604 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1e30b467-3355-49fe-862d-9a79cdf0a35b-logs\") pod \"barbican-worker-5db748cdcf-qnswn\" (UID: \"1e30b467-3355-49fe-862d-9a79cdf0a35b\") " 
pod="openstack/barbican-worker-5db748cdcf-qnswn" Dec 10 13:14:20 crc kubenswrapper[4921]: I1210 13:14:20.900635 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1e30b467-3355-49fe-862d-9a79cdf0a35b-combined-ca-bundle\") pod \"barbican-worker-5db748cdcf-qnswn\" (UID: \"1e30b467-3355-49fe-862d-9a79cdf0a35b\") " pod="openstack/barbican-worker-5db748cdcf-qnswn" Dec 10 13:14:20 crc kubenswrapper[4921]: I1210 13:14:20.900676 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rslh6\" (UniqueName: \"kubernetes.io/projected/6bad2725-64db-4468-b2c2-a1454f632ed9-kube-api-access-rslh6\") pod \"barbican-keystone-listener-5fdffd7d58-bx6fh\" (UID: \"6bad2725-64db-4468-b2c2-a1454f632ed9\") " pod="openstack/barbican-keystone-listener-5fdffd7d58-bx6fh" Dec 10 13:14:20 crc kubenswrapper[4921]: I1210 13:14:20.905143 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1e30b467-3355-49fe-862d-9a79cdf0a35b-logs\") pod \"barbican-worker-5db748cdcf-qnswn\" (UID: \"1e30b467-3355-49fe-862d-9a79cdf0a35b\") " pod="openstack/barbican-worker-5db748cdcf-qnswn" Dec 10 13:14:20 crc kubenswrapper[4921]: I1210 13:14:20.918908 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1e30b467-3355-49fe-862d-9a79cdf0a35b-config-data\") pod \"barbican-worker-5db748cdcf-qnswn\" (UID: \"1e30b467-3355-49fe-862d-9a79cdf0a35b\") " pod="openstack/barbican-worker-5db748cdcf-qnswn" Dec 10 13:14:20 crc kubenswrapper[4921]: I1210 13:14:20.925125 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1e30b467-3355-49fe-862d-9a79cdf0a35b-combined-ca-bundle\") pod \"barbican-worker-5db748cdcf-qnswn\" (UID: \"1e30b467-3355-49fe-862d-9a79cdf0a35b\") " pod="openstack/barbican-worker-5db748cdcf-qnswn" Dec 10 13:14:20 crc kubenswrapper[4921]: I1210 13:14:20.935082 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1e30b467-3355-49fe-862d-9a79cdf0a35b-config-data-custom\") pod \"barbican-worker-5db748cdcf-qnswn\" (UID: \"1e30b467-3355-49fe-862d-9a79cdf0a35b\") " pod="openstack/barbican-worker-5db748cdcf-qnswn" Dec 10 13:14:20 crc kubenswrapper[4921]: I1210 13:14:20.957762 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5f66db59b9-6klql"] Dec 10 13:14:20 crc kubenswrapper[4921]: I1210 13:14:20.982297 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-5fdffd7d58-bx6fh"] Dec 10 13:14:20 crc kubenswrapper[4921]: I1210 13:14:20.982446 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5f66db59b9-6klql" Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.001155 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cnrxt\" (UniqueName: \"kubernetes.io/projected/1e30b467-3355-49fe-862d-9a79cdf0a35b-kube-api-access-cnrxt\") pod \"barbican-worker-5db748cdcf-qnswn\" (UID: \"1e30b467-3355-49fe-862d-9a79cdf0a35b\") " pod="openstack/barbican-worker-5db748cdcf-qnswn" Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.002680 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rslh6\" (UniqueName: \"kubernetes.io/projected/6bad2725-64db-4468-b2c2-a1454f632ed9-kube-api-access-rslh6\") pod \"barbican-keystone-listener-5fdffd7d58-bx6fh\" (UID: \"6bad2725-64db-4468-b2c2-a1454f632ed9\") " pod="openstack/barbican-keystone-listener-5fdffd7d58-bx6fh" Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.002833 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a754d7d4-7b14-49f8-b934-cd4adf91840e-dns-svc\") pod \"dnsmasq-dns-5f66db59b9-6klql\" (UID: \"a754d7d4-7b14-49f8-b934-cd4adf91840e\") " pod="openstack/dnsmasq-dns-5f66db59b9-6klql" Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.002918 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a754d7d4-7b14-49f8-b934-cd4adf91840e-ovsdbserver-sb\") pod \"dnsmasq-dns-5f66db59b9-6klql\" (UID: \"a754d7d4-7b14-49f8-b934-cd4adf91840e\") " pod="openstack/dnsmasq-dns-5f66db59b9-6klql" Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.003024 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6bad2725-64db-4468-b2c2-a1454f632ed9-config-data-custom\") pod \"barbican-keystone-listener-5fdffd7d58-bx6fh\" (UID: \"6bad2725-64db-4468-b2c2-a1454f632ed9\") " pod="openstack/barbican-keystone-listener-5fdffd7d58-bx6fh" Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.003128 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6bad2725-64db-4468-b2c2-a1454f632ed9-logs\") pod \"barbican-keystone-listener-5fdffd7d58-bx6fh\" (UID: \"6bad2725-64db-4468-b2c2-a1454f632ed9\") " pod="openstack/barbican-keystone-listener-5fdffd7d58-bx6fh" Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.003213 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6bad2725-64db-4468-b2c2-a1454f632ed9-combined-ca-bundle\") pod \"barbican-keystone-listener-5fdffd7d58-bx6fh\" (UID: \"6bad2725-64db-4468-b2c2-a1454f632ed9\") " pod="openstack/barbican-keystone-listener-5fdffd7d58-bx6fh" Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.003289 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a754d7d4-7b14-49f8-b934-cd4adf91840e-config\") pod \"dnsmasq-dns-5f66db59b9-6klql\" (UID: \"a754d7d4-7b14-49f8-b934-cd4adf91840e\") " pod="openstack/dnsmasq-dns-5f66db59b9-6klql" Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.003381 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/6bad2725-64db-4468-b2c2-a1454f632ed9-config-data\") pod \"barbican-keystone-listener-5fdffd7d58-bx6fh\" (UID: \"6bad2725-64db-4468-b2c2-a1454f632ed9\") " pod="openstack/barbican-keystone-listener-5fdffd7d58-bx6fh" Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.003545 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a754d7d4-7b14-49f8-b934-cd4adf91840e-ovsdbserver-nb\") pod \"dnsmasq-dns-5f66db59b9-6klql\" (UID: \"a754d7d4-7b14-49f8-b934-cd4adf91840e\") " pod="openstack/dnsmasq-dns-5f66db59b9-6klql" Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.003633 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-grc8d\" (UniqueName: \"kubernetes.io/projected/a754d7d4-7b14-49f8-b934-cd4adf91840e-kube-api-access-grc8d\") pod \"dnsmasq-dns-5f66db59b9-6klql\" (UID: \"a754d7d4-7b14-49f8-b934-cd4adf91840e\") " pod="openstack/dnsmasq-dns-5f66db59b9-6klql" Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.012067 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6bad2725-64db-4468-b2c2-a1454f632ed9-logs\") pod \"barbican-keystone-listener-5fdffd7d58-bx6fh\" (UID: \"6bad2725-64db-4468-b2c2-a1454f632ed9\") " pod="openstack/barbican-keystone-listener-5fdffd7d58-bx6fh" Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.020283 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6bad2725-64db-4468-b2c2-a1454f632ed9-config-data\") pod \"barbican-keystone-listener-5fdffd7d58-bx6fh\" (UID: \"6bad2725-64db-4468-b2c2-a1454f632ed9\") " pod="openstack/barbican-keystone-listener-5fdffd7d58-bx6fh" Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.021025 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6bad2725-64db-4468-b2c2-a1454f632ed9-combined-ca-bundle\") pod \"barbican-keystone-listener-5fdffd7d58-bx6fh\" (UID: \"6bad2725-64db-4468-b2c2-a1454f632ed9\") " pod="openstack/barbican-keystone-listener-5fdffd7d58-bx6fh" Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.022044 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5f66db59b9-6klql"] Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.029181 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6bad2725-64db-4468-b2c2-a1454f632ed9-config-data-custom\") pod \"barbican-keystone-listener-5fdffd7d58-bx6fh\" (UID: \"6bad2725-64db-4468-b2c2-a1454f632ed9\") " pod="openstack/barbican-keystone-listener-5fdffd7d58-bx6fh" Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.057065 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-565cd4b5db-xfcb6" Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.068376 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5f66db59b9-6klql"] Dec 10 13:14:21 crc kubenswrapper[4921]: E1210 13:14:21.071661 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[config dns-svc kube-api-access-grc8d ovsdbserver-nb ovsdbserver-sb], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/dnsmasq-dns-5f66db59b9-6klql" podUID="a754d7d4-7b14-49f8-b934-cd4adf91840e" Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.072007 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-5db748cdcf-qnswn" Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.090044 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rslh6\" (UniqueName: \"kubernetes.io/projected/6bad2725-64db-4468-b2c2-a1454f632ed9-kube-api-access-rslh6\") pod \"barbican-keystone-listener-5fdffd7d58-bx6fh\" (UID: \"6bad2725-64db-4468-b2c2-a1454f632ed9\") " pod="openstack/barbican-keystone-listener-5fdffd7d58-bx6fh" Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.105479 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a754d7d4-7b14-49f8-b934-cd4adf91840e-dns-svc\") pod \"dnsmasq-dns-5f66db59b9-6klql\" (UID: \"a754d7d4-7b14-49f8-b934-cd4adf91840e\") " pod="openstack/dnsmasq-dns-5f66db59b9-6klql" Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.105516 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a754d7d4-7b14-49f8-b934-cd4adf91840e-ovsdbserver-sb\") pod \"dnsmasq-dns-5f66db59b9-6klql\" (UID: \"a754d7d4-7b14-49f8-b934-cd4adf91840e\") " pod="openstack/dnsmasq-dns-5f66db59b9-6klql" Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.105582 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a754d7d4-7b14-49f8-b934-cd4adf91840e-config\") pod \"dnsmasq-dns-5f66db59b9-6klql\" (UID: \"a754d7d4-7b14-49f8-b934-cd4adf91840e\") " pod="openstack/dnsmasq-dns-5f66db59b9-6klql" Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.105608 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a754d7d4-7b14-49f8-b934-cd4adf91840e-ovsdbserver-nb\") pod \"dnsmasq-dns-5f66db59b9-6klql\" (UID: \"a754d7d4-7b14-49f8-b934-cd4adf91840e\") " pod="openstack/dnsmasq-dns-5f66db59b9-6klql" Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.105635 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-grc8d\" (UniqueName: \"kubernetes.io/projected/a754d7d4-7b14-49f8-b934-cd4adf91840e-kube-api-access-grc8d\") pod \"dnsmasq-dns-5f66db59b9-6klql\" (UID: \"a754d7d4-7b14-49f8-b934-cd4adf91840e\") " pod="openstack/dnsmasq-dns-5f66db59b9-6klql" Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.110235 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a754d7d4-7b14-49f8-b934-cd4adf91840e-config\") pod \"dnsmasq-dns-5f66db59b9-6klql\" (UID: \"a754d7d4-7b14-49f8-b934-cd4adf91840e\") " pod="openstack/dnsmasq-dns-5f66db59b9-6klql" Dec 10 13:14:21 crc kubenswrapper[4921]: 
I1210 13:14:21.110866 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a754d7d4-7b14-49f8-b934-cd4adf91840e-ovsdbserver-sb\") pod \"dnsmasq-dns-5f66db59b9-6klql\" (UID: \"a754d7d4-7b14-49f8-b934-cd4adf91840e\") " pod="openstack/dnsmasq-dns-5f66db59b9-6klql" Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.111432 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a754d7d4-7b14-49f8-b934-cd4adf91840e-ovsdbserver-nb\") pod \"dnsmasq-dns-5f66db59b9-6klql\" (UID: \"a754d7d4-7b14-49f8-b934-cd4adf91840e\") " pod="openstack/dnsmasq-dns-5f66db59b9-6klql" Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.129206 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-f7c76d556-mfrfz"] Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.130944 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-f7c76d556-mfrfz" Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.131920 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a754d7d4-7b14-49f8-b934-cd4adf91840e-dns-svc\") pod \"dnsmasq-dns-5f66db59b9-6klql\" (UID: \"a754d7d4-7b14-49f8-b934-cd4adf91840e\") " pod="openstack/dnsmasq-dns-5f66db59b9-6klql" Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.145905 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-869f779d85-gbgn5"] Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.148569 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-869f779d85-gbgn5" Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.153362 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-f7c76d556-mfrfz"] Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.153760 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-keystone-listener-5fdffd7d58-bx6fh" Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.155507 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.155705 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.155878 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-rcjt2" Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.168693 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-ovndbs" Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.179769 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-grc8d\" (UniqueName: \"kubernetes.io/projected/a754d7d4-7b14-49f8-b934-cd4adf91840e-kube-api-access-grc8d\") pod \"dnsmasq-dns-5f66db59b9-6klql\" (UID: \"a754d7d4-7b14-49f8-b934-cd4adf91840e\") " pod="openstack/dnsmasq-dns-5f66db59b9-6klql" Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.272791 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-869f779d85-gbgn5"] Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.272839 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-54f74849c6-f5nfw"] Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.281766 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-54f74849c6-f5nfw" Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.283085 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5f66db59b9-6klql" Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.283112 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"bc458bae-c665-435a-ae77-3b7cb34146bb","Type":"ContainerStarted","Data":"cd2d856b0b56d9bfd2ab7c2ecee50a4edd3058fe27f6cf793b6601972e1eba4f"} Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.287648 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data" Dec 10 13:14:21 crc kubenswrapper[4921]: E1210 13:14:21.291894 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified\\\"\"" pod="openstack/cinder-db-sync-nqvc7" podUID="049815fe-e8f8-45c6-9360-d2d331fa8cd3" Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.314327 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c3d07a5-ac22-4395-9027-78255ba114ca-combined-ca-bundle\") pod \"neutron-f7c76d556-mfrfz\" (UID: \"7c3d07a5-ac22-4395-9027-78255ba114ca\") " pod="openstack/neutron-f7c76d556-mfrfz" Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.314376 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9282d0bd-cea3-4e42-9d5f-778f666aa65a-config\") pod \"dnsmasq-dns-869f779d85-gbgn5\" (UID: \"9282d0bd-cea3-4e42-9d5f-778f666aa65a\") " pod="openstack/dnsmasq-dns-869f779d85-gbgn5" Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.314407 
4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9282d0bd-cea3-4e42-9d5f-778f666aa65a-ovsdbserver-nb\") pod \"dnsmasq-dns-869f779d85-gbgn5\" (UID: \"9282d0bd-cea3-4e42-9d5f-778f666aa65a\") " pod="openstack/dnsmasq-dns-869f779d85-gbgn5" Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.314431 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/7c3d07a5-ac22-4395-9027-78255ba114ca-config\") pod \"neutron-f7c76d556-mfrfz\" (UID: \"7c3d07a5-ac22-4395-9027-78255ba114ca\") " pod="openstack/neutron-f7c76d556-mfrfz" Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.314457 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9282d0bd-cea3-4e42-9d5f-778f666aa65a-dns-svc\") pod \"dnsmasq-dns-869f779d85-gbgn5\" (UID: \"9282d0bd-cea3-4e42-9d5f-778f666aa65a\") " pod="openstack/dnsmasq-dns-869f779d85-gbgn5" Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.314506 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/7c3d07a5-ac22-4395-9027-78255ba114ca-ovndb-tls-certs\") pod \"neutron-f7c76d556-mfrfz\" (UID: \"7c3d07a5-ac22-4395-9027-78255ba114ca\") " pod="openstack/neutron-f7c76d556-mfrfz" Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.314547 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b868n\" (UniqueName: \"kubernetes.io/projected/7c3d07a5-ac22-4395-9027-78255ba114ca-kube-api-access-b868n\") pod \"neutron-f7c76d556-mfrfz\" (UID: \"7c3d07a5-ac22-4395-9027-78255ba114ca\") " pod="openstack/neutron-f7c76d556-mfrfz" Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.314579 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9282d0bd-cea3-4e42-9d5f-778f666aa65a-ovsdbserver-sb\") pod \"dnsmasq-dns-869f779d85-gbgn5\" (UID: \"9282d0bd-cea3-4e42-9d5f-778f666aa65a\") " pod="openstack/dnsmasq-dns-869f779d85-gbgn5" Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.314597 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/7c3d07a5-ac22-4395-9027-78255ba114ca-httpd-config\") pod \"neutron-f7c76d556-mfrfz\" (UID: \"7c3d07a5-ac22-4395-9027-78255ba114ca\") " pod="openstack/neutron-f7c76d556-mfrfz" Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.314614 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hpbzd\" (UniqueName: \"kubernetes.io/projected/9282d0bd-cea3-4e42-9d5f-778f666aa65a-kube-api-access-hpbzd\") pod \"dnsmasq-dns-869f779d85-gbgn5\" (UID: \"9282d0bd-cea3-4e42-9d5f-778f666aa65a\") " pod="openstack/dnsmasq-dns-869f779d85-gbgn5" Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.391607 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-54f74849c6-f5nfw"] Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.416604 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/7c3d07a5-ac22-4395-9027-78255ba114ca-ovndb-tls-certs\") pod \"neutron-f7c76d556-mfrfz\" (UID: \"7c3d07a5-ac22-4395-9027-78255ba114ca\") " pod="openstack/neutron-f7c76d556-mfrfz" Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.416691 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-drg7l\" (UniqueName: \"kubernetes.io/projected/a179cfa2-4a28-42ee-a55e-e95e808bb297-kube-api-access-drg7l\") pod \"barbican-api-54f74849c6-f5nfw\" (UID: \"a179cfa2-4a28-42ee-a55e-e95e808bb297\") " pod="openstack/barbican-api-54f74849c6-f5nfw" Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.416735 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b868n\" (UniqueName: \"kubernetes.io/projected/7c3d07a5-ac22-4395-9027-78255ba114ca-kube-api-access-b868n\") pod \"neutron-f7c76d556-mfrfz\" (UID: \"7c3d07a5-ac22-4395-9027-78255ba114ca\") " pod="openstack/neutron-f7c76d556-mfrfz" Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.416760 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a179cfa2-4a28-42ee-a55e-e95e808bb297-config-data-custom\") pod \"barbican-api-54f74849c6-f5nfw\" (UID: \"a179cfa2-4a28-42ee-a55e-e95e808bb297\") " pod="openstack/barbican-api-54f74849c6-f5nfw" Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.416774 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a179cfa2-4a28-42ee-a55e-e95e808bb297-combined-ca-bundle\") pod \"barbican-api-54f74849c6-f5nfw\" (UID: \"a179cfa2-4a28-42ee-a55e-e95e808bb297\") " pod="openstack/barbican-api-54f74849c6-f5nfw" Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.416800 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9282d0bd-cea3-4e42-9d5f-778f666aa65a-ovsdbserver-sb\") pod \"dnsmasq-dns-869f779d85-gbgn5\" (UID: \"9282d0bd-cea3-4e42-9d5f-778f666aa65a\") " pod="openstack/dnsmasq-dns-869f779d85-gbgn5" Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.416816 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a179cfa2-4a28-42ee-a55e-e95e808bb297-config-data\") pod \"barbican-api-54f74849c6-f5nfw\" (UID: \"a179cfa2-4a28-42ee-a55e-e95e808bb297\") " pod="openstack/barbican-api-54f74849c6-f5nfw" Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.416840 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/7c3d07a5-ac22-4395-9027-78255ba114ca-httpd-config\") pod \"neutron-f7c76d556-mfrfz\" (UID: \"7c3d07a5-ac22-4395-9027-78255ba114ca\") " pod="openstack/neutron-f7c76d556-mfrfz" Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.416857 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hpbzd\" (UniqueName: \"kubernetes.io/projected/9282d0bd-cea3-4e42-9d5f-778f666aa65a-kube-api-access-hpbzd\") pod \"dnsmasq-dns-869f779d85-gbgn5\" (UID: \"9282d0bd-cea3-4e42-9d5f-778f666aa65a\") " pod="openstack/dnsmasq-dns-869f779d85-gbgn5" Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.416878 4921 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a179cfa2-4a28-42ee-a55e-e95e808bb297-logs\") pod \"barbican-api-54f74849c6-f5nfw\" (UID: \"a179cfa2-4a28-42ee-a55e-e95e808bb297\") " pod="openstack/barbican-api-54f74849c6-f5nfw" Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.416900 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c3d07a5-ac22-4395-9027-78255ba114ca-combined-ca-bundle\") pod \"neutron-f7c76d556-mfrfz\" (UID: \"7c3d07a5-ac22-4395-9027-78255ba114ca\") " pod="openstack/neutron-f7c76d556-mfrfz" Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.416924 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9282d0bd-cea3-4e42-9d5f-778f666aa65a-config\") pod \"dnsmasq-dns-869f779d85-gbgn5\" (UID: \"9282d0bd-cea3-4e42-9d5f-778f666aa65a\") " pod="openstack/dnsmasq-dns-869f779d85-gbgn5" Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.416943 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9282d0bd-cea3-4e42-9d5f-778f666aa65a-ovsdbserver-nb\") pod \"dnsmasq-dns-869f779d85-gbgn5\" (UID: \"9282d0bd-cea3-4e42-9d5f-778f666aa65a\") " pod="openstack/dnsmasq-dns-869f779d85-gbgn5" Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.416987 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/7c3d07a5-ac22-4395-9027-78255ba114ca-config\") pod \"neutron-f7c76d556-mfrfz\" (UID: \"7c3d07a5-ac22-4395-9027-78255ba114ca\") " pod="openstack/neutron-f7c76d556-mfrfz" Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.417019 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9282d0bd-cea3-4e42-9d5f-778f666aa65a-dns-svc\") pod \"dnsmasq-dns-869f779d85-gbgn5\" (UID: \"9282d0bd-cea3-4e42-9d5f-778f666aa65a\") " pod="openstack/dnsmasq-dns-869f779d85-gbgn5" Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.417929 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9282d0bd-cea3-4e42-9d5f-778f666aa65a-dns-svc\") pod \"dnsmasq-dns-869f779d85-gbgn5\" (UID: \"9282d0bd-cea3-4e42-9d5f-778f666aa65a\") " pod="openstack/dnsmasq-dns-869f779d85-gbgn5" Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.418516 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9282d0bd-cea3-4e42-9d5f-778f666aa65a-config\") pod \"dnsmasq-dns-869f779d85-gbgn5\" (UID: \"9282d0bd-cea3-4e42-9d5f-778f666aa65a\") " pod="openstack/dnsmasq-dns-869f779d85-gbgn5" Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.418539 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9282d0bd-cea3-4e42-9d5f-778f666aa65a-ovsdbserver-sb\") pod \"dnsmasq-dns-869f779d85-gbgn5\" (UID: \"9282d0bd-cea3-4e42-9d5f-778f666aa65a\") " pod="openstack/dnsmasq-dns-869f779d85-gbgn5" Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.419112 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9282d0bd-cea3-4e42-9d5f-778f666aa65a-ovsdbserver-nb\") pod 
\"dnsmasq-dns-869f779d85-gbgn5\" (UID: \"9282d0bd-cea3-4e42-9d5f-778f666aa65a\") " pod="openstack/dnsmasq-dns-869f779d85-gbgn5" Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.421767 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/7c3d07a5-ac22-4395-9027-78255ba114ca-httpd-config\") pod \"neutron-f7c76d556-mfrfz\" (UID: \"7c3d07a5-ac22-4395-9027-78255ba114ca\") " pod="openstack/neutron-f7c76d556-mfrfz" Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.437360 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5f66db59b9-6klql" Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.438528 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/7c3d07a5-ac22-4395-9027-78255ba114ca-ovndb-tls-certs\") pod \"neutron-f7c76d556-mfrfz\" (UID: \"7c3d07a5-ac22-4395-9027-78255ba114ca\") " pod="openstack/neutron-f7c76d556-mfrfz" Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.444847 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c3d07a5-ac22-4395-9027-78255ba114ca-combined-ca-bundle\") pod \"neutron-f7c76d556-mfrfz\" (UID: \"7c3d07a5-ac22-4395-9027-78255ba114ca\") " pod="openstack/neutron-f7c76d556-mfrfz" Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.452949 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b868n\" (UniqueName: \"kubernetes.io/projected/7c3d07a5-ac22-4395-9027-78255ba114ca-kube-api-access-b868n\") pod \"neutron-f7c76d556-mfrfz\" (UID: \"7c3d07a5-ac22-4395-9027-78255ba114ca\") " pod="openstack/neutron-f7c76d556-mfrfz" Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.462081 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hpbzd\" (UniqueName: \"kubernetes.io/projected/9282d0bd-cea3-4e42-9d5f-778f666aa65a-kube-api-access-hpbzd\") pod \"dnsmasq-dns-869f779d85-gbgn5\" (UID: \"9282d0bd-cea3-4e42-9d5f-778f666aa65a\") " pod="openstack/dnsmasq-dns-869f779d85-gbgn5" Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.462168 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/7c3d07a5-ac22-4395-9027-78255ba114ca-config\") pod \"neutron-f7c76d556-mfrfz\" (UID: \"7c3d07a5-ac22-4395-9027-78255ba114ca\") " pod="openstack/neutron-f7c76d556-mfrfz" Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.520340 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-drg7l\" (UniqueName: \"kubernetes.io/projected/a179cfa2-4a28-42ee-a55e-e95e808bb297-kube-api-access-drg7l\") pod \"barbican-api-54f74849c6-f5nfw\" (UID: \"a179cfa2-4a28-42ee-a55e-e95e808bb297\") " pod="openstack/barbican-api-54f74849c6-f5nfw" Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.520742 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a179cfa2-4a28-42ee-a55e-e95e808bb297-config-data-custom\") pod \"barbican-api-54f74849c6-f5nfw\" (UID: \"a179cfa2-4a28-42ee-a55e-e95e808bb297\") " pod="openstack/barbican-api-54f74849c6-f5nfw" Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.521094 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/a179cfa2-4a28-42ee-a55e-e95e808bb297-combined-ca-bundle\") pod \"barbican-api-54f74849c6-f5nfw\" (UID: \"a179cfa2-4a28-42ee-a55e-e95e808bb297\") " pod="openstack/barbican-api-54f74849c6-f5nfw" Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.521299 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a179cfa2-4a28-42ee-a55e-e95e808bb297-config-data\") pod \"barbican-api-54f74849c6-f5nfw\" (UID: \"a179cfa2-4a28-42ee-a55e-e95e808bb297\") " pod="openstack/barbican-api-54f74849c6-f5nfw" Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.521330 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a179cfa2-4a28-42ee-a55e-e95e808bb297-logs\") pod \"barbican-api-54f74849c6-f5nfw\" (UID: \"a179cfa2-4a28-42ee-a55e-e95e808bb297\") " pod="openstack/barbican-api-54f74849c6-f5nfw" Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.521815 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a179cfa2-4a28-42ee-a55e-e95e808bb297-logs\") pod \"barbican-api-54f74849c6-f5nfw\" (UID: \"a179cfa2-4a28-42ee-a55e-e95e808bb297\") " pod="openstack/barbican-api-54f74849c6-f5nfw" Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.533489 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a179cfa2-4a28-42ee-a55e-e95e808bb297-config-data-custom\") pod \"barbican-api-54f74849c6-f5nfw\" (UID: \"a179cfa2-4a28-42ee-a55e-e95e808bb297\") " pod="openstack/barbican-api-54f74849c6-f5nfw" Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.536244 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a179cfa2-4a28-42ee-a55e-e95e808bb297-config-data\") pod \"barbican-api-54f74849c6-f5nfw\" (UID: \"a179cfa2-4a28-42ee-a55e-e95e808bb297\") " pod="openstack/barbican-api-54f74849c6-f5nfw" Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.537221 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a179cfa2-4a28-42ee-a55e-e95e808bb297-combined-ca-bundle\") pod \"barbican-api-54f74849c6-f5nfw\" (UID: \"a179cfa2-4a28-42ee-a55e-e95e808bb297\") " pod="openstack/barbican-api-54f74849c6-f5nfw" Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.548826 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-f7c76d556-mfrfz" Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.556082 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-drg7l\" (UniqueName: \"kubernetes.io/projected/a179cfa2-4a28-42ee-a55e-e95e808bb297-kube-api-access-drg7l\") pod \"barbican-api-54f74849c6-f5nfw\" (UID: \"a179cfa2-4a28-42ee-a55e-e95e808bb297\") " pod="openstack/barbican-api-54f74849c6-f5nfw" Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.624863 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a754d7d4-7b14-49f8-b934-cd4adf91840e-ovsdbserver-sb\") pod \"a754d7d4-7b14-49f8-b934-cd4adf91840e\" (UID: \"a754d7d4-7b14-49f8-b934-cd4adf91840e\") " Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.624901 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a754d7d4-7b14-49f8-b934-cd4adf91840e-dns-svc\") pod \"a754d7d4-7b14-49f8-b934-cd4adf91840e\" (UID: \"a754d7d4-7b14-49f8-b934-cd4adf91840e\") " Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.624973 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a754d7d4-7b14-49f8-b934-cd4adf91840e-config\") pod \"a754d7d4-7b14-49f8-b934-cd4adf91840e\" (UID: \"a754d7d4-7b14-49f8-b934-cd4adf91840e\") " Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.625012 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a754d7d4-7b14-49f8-b934-cd4adf91840e-ovsdbserver-nb\") pod \"a754d7d4-7b14-49f8-b934-cd4adf91840e\" (UID: \"a754d7d4-7b14-49f8-b934-cd4adf91840e\") " Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.625054 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-grc8d\" (UniqueName: \"kubernetes.io/projected/a754d7d4-7b14-49f8-b934-cd4adf91840e-kube-api-access-grc8d\") pod \"a754d7d4-7b14-49f8-b934-cd4adf91840e\" (UID: \"a754d7d4-7b14-49f8-b934-cd4adf91840e\") " Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.625987 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a754d7d4-7b14-49f8-b934-cd4adf91840e-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "a754d7d4-7b14-49f8-b934-cd4adf91840e" (UID: "a754d7d4-7b14-49f8-b934-cd4adf91840e"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.626349 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a754d7d4-7b14-49f8-b934-cd4adf91840e-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "a754d7d4-7b14-49f8-b934-cd4adf91840e" (UID: "a754d7d4-7b14-49f8-b934-cd4adf91840e"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.626577 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a754d7d4-7b14-49f8-b934-cd4adf91840e-config" (OuterVolumeSpecName: "config") pod "a754d7d4-7b14-49f8-b934-cd4adf91840e" (UID: "a754d7d4-7b14-49f8-b934-cd4adf91840e"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.626805 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-869f779d85-gbgn5" Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.626861 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a754d7d4-7b14-49f8-b934-cd4adf91840e-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "a754d7d4-7b14-49f8-b934-cd4adf91840e" (UID: "a754d7d4-7b14-49f8-b934-cd4adf91840e"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.647724 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a754d7d4-7b14-49f8-b934-cd4adf91840e-kube-api-access-grc8d" (OuterVolumeSpecName: "kube-api-access-grc8d") pod "a754d7d4-7b14-49f8-b934-cd4adf91840e" (UID: "a754d7d4-7b14-49f8-b934-cd4adf91840e"). InnerVolumeSpecName "kube-api-access-grc8d". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.669597 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-5f895c947d-lxwdl"] Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.731459 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-grc8d\" (UniqueName: \"kubernetes.io/projected/a754d7d4-7b14-49f8-b934-cd4adf91840e-kube-api-access-grc8d\") on node \"crc\" DevicePath \"\"" Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.731495 4921 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a754d7d4-7b14-49f8-b934-cd4adf91840e-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.731505 4921 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a754d7d4-7b14-49f8-b934-cd4adf91840e-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.731513 4921 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a754d7d4-7b14-49f8-b934-cd4adf91840e-config\") on node \"crc\" DevicePath \"\"" Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.731523 4921 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a754d7d4-7b14-49f8-b934-cd4adf91840e-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 10 13:14:21 crc kubenswrapper[4921]: I1210 13:14:21.735257 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-54f74849c6-f5nfw" Dec 10 13:14:21 crc kubenswrapper[4921]: W1210 13:14:21.753955 4921 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf8ebbb77_6cb6_44f7_86cd_42bf505379ae.slice/crio-1b06d5c089885df3b49628052fb2b839949645a1abbbe96322487153584d4d0b WatchSource:0}: Error finding container 1b06d5c089885df3b49628052fb2b839949645a1abbbe96322487153584d4d0b: Status 404 returned error can't find the container with id 1b06d5c089885df3b49628052fb2b839949645a1abbbe96322487153584d4d0b Dec 10 13:14:22 crc kubenswrapper[4921]: I1210 13:14:22.017258 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-5db748cdcf-qnswn"] Dec 10 13:14:22 crc kubenswrapper[4921]: I1210 13:14:22.086306 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-565cd4b5db-xfcb6"] Dec 10 13:14:22 crc kubenswrapper[4921]: W1210 13:14:22.086763 4921 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2d4b5545_ad15_4ed1_b655_61fa51deb3d7.slice/crio-ae87c49ae6eb8e3450091e06d0c614c27063385726edab4bc260085f87852f01 WatchSource:0}: Error finding container ae87c49ae6eb8e3450091e06d0c614c27063385726edab4bc260085f87852f01: Status 404 returned error can't find the container with id ae87c49ae6eb8e3450091e06d0c614c27063385726edab4bc260085f87852f01 Dec 10 13:14:22 crc kubenswrapper[4921]: I1210 13:14:22.255317 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-5fdffd7d58-bx6fh"] Dec 10 13:14:22 crc kubenswrapper[4921]: W1210 13:14:22.289830 4921 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6bad2725_64db_4468_b2c2_a1454f632ed9.slice/crio-e02430f58d04869472bbda021f7610dae2f384d749e011b130d01b05ebecaa85 WatchSource:0}: Error finding container e02430f58d04869472bbda021f7610dae2f384d749e011b130d01b05ebecaa85: Status 404 returned error can't find the container with id e02430f58d04869472bbda021f7610dae2f384d749e011b130d01b05ebecaa85 Dec 10 13:14:22 crc kubenswrapper[4921]: I1210 13:14:22.299018 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-5db748cdcf-qnswn" event={"ID":"1e30b467-3355-49fe-862d-9a79cdf0a35b","Type":"ContainerStarted","Data":"aecb8dc7731b6f912e3130984bcdf25363ad8da6409059d61b90258ef17989de"} Dec 10 13:14:22 crc kubenswrapper[4921]: I1210 13:14:22.302532 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-565cd4b5db-xfcb6" event={"ID":"2d4b5545-ad15-4ed1-b655-61fa51deb3d7","Type":"ContainerStarted","Data":"ae87c49ae6eb8e3450091e06d0c614c27063385726edab4bc260085f87852f01"} Dec 10 13:14:22 crc kubenswrapper[4921]: I1210 13:14:22.311715 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5f66db59b9-6klql" Dec 10 13:14:22 crc kubenswrapper[4921]: I1210 13:14:22.312780 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-5f895c947d-lxwdl" event={"ID":"f8ebbb77-6cb6-44f7-86cd-42bf505379ae","Type":"ContainerStarted","Data":"d37c4ffca0c3564a1163dd7c304694a22d5605f5e2eeaeafe3ce69df158ba7dd"} Dec 10 13:14:22 crc kubenswrapper[4921]: I1210 13:14:22.312868 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-5f895c947d-lxwdl" event={"ID":"f8ebbb77-6cb6-44f7-86cd-42bf505379ae","Type":"ContainerStarted","Data":"1b06d5c089885df3b49628052fb2b839949645a1abbbe96322487153584d4d0b"} Dec 10 13:14:22 crc kubenswrapper[4921]: I1210 13:14:22.390427 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5f66db59b9-6klql"] Dec 10 13:14:22 crc kubenswrapper[4921]: I1210 13:14:22.396861 4921 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5f66db59b9-6klql"] Dec 10 13:14:22 crc kubenswrapper[4921]: I1210 13:14:22.537173 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-54f74849c6-f5nfw"] Dec 10 13:14:22 crc kubenswrapper[4921]: W1210 13:14:22.551533 4921 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9282d0bd_cea3_4e42_9d5f_778f666aa65a.slice/crio-eb99c6fab6077b00e443554c1d92e8a1311ce3bad5d8b459addfdfeda16b1569 WatchSource:0}: Error finding container eb99c6fab6077b00e443554c1d92e8a1311ce3bad5d8b459addfdfeda16b1569: Status 404 returned error can't find the container with id eb99c6fab6077b00e443554c1d92e8a1311ce3bad5d8b459addfdfeda16b1569 Dec 10 13:14:22 crc kubenswrapper[4921]: I1210 13:14:22.596583 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-f7c76d556-mfrfz"] Dec 10 13:14:22 crc kubenswrapper[4921]: I1210 13:14:22.607883 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-869f779d85-gbgn5"] Dec 10 13:14:23 crc kubenswrapper[4921]: I1210 13:14:23.211102 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a754d7d4-7b14-49f8-b934-cd4adf91840e" path="/var/lib/kubelet/pods/a754d7d4-7b14-49f8-b934-cd4adf91840e/volumes" Dec 10 13:14:23 crc kubenswrapper[4921]: I1210 13:14:23.350869 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-54f74849c6-f5nfw" event={"ID":"a179cfa2-4a28-42ee-a55e-e95e808bb297","Type":"ContainerStarted","Data":"25634e5a658ba2dfd8a1c157b9c31f286244d3ca23baa47498acd785cc920868"} Dec 10 13:14:23 crc kubenswrapper[4921]: I1210 13:14:23.350917 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-54f74849c6-f5nfw" event={"ID":"a179cfa2-4a28-42ee-a55e-e95e808bb297","Type":"ContainerStarted","Data":"e6e4ff439515cfe2556f8cc83b95f1266e0e23872d40eb8b841ed34dbbd81d75"} Dec 10 13:14:23 crc kubenswrapper[4921]: I1210 13:14:23.352889 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-5fdffd7d58-bx6fh" event={"ID":"6bad2725-64db-4468-b2c2-a1454f632ed9","Type":"ContainerStarted","Data":"e02430f58d04869472bbda021f7610dae2f384d749e011b130d01b05ebecaa85"} Dec 10 13:14:23 crc kubenswrapper[4921]: I1210 13:14:23.357694 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-565cd4b5db-xfcb6" 
event={"ID":"2d4b5545-ad15-4ed1-b655-61fa51deb3d7","Type":"ContainerStarted","Data":"2eef349dee6bc023fca68f657cd16092b595329744847388d168e0952627cbea"} Dec 10 13:14:23 crc kubenswrapper[4921]: I1210 13:14:23.357801 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-565cd4b5db-xfcb6" Dec 10 13:14:23 crc kubenswrapper[4921]: I1210 13:14:23.361722 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-5f895c947d-lxwdl" event={"ID":"f8ebbb77-6cb6-44f7-86cd-42bf505379ae","Type":"ContainerStarted","Data":"b28f22b4ee6f0ccad041a0926c8a8716d9423f83277cf2b2de758dfee4dfdb32"} Dec 10 13:14:23 crc kubenswrapper[4921]: I1210 13:14:23.361790 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-5f895c947d-lxwdl" Dec 10 13:14:23 crc kubenswrapper[4921]: I1210 13:14:23.361819 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-5f895c947d-lxwdl" Dec 10 13:14:23 crc kubenswrapper[4921]: I1210 13:14:23.370289 4921 generic.go:334] "Generic (PLEG): container finished" podID="9282d0bd-cea3-4e42-9d5f-778f666aa65a" containerID="12afae38cbf0087fedac75025736e96a07a2d919bace3df6fb8a8bff614ccb4f" exitCode=0 Dec 10 13:14:23 crc kubenswrapper[4921]: I1210 13:14:23.370540 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-869f779d85-gbgn5" event={"ID":"9282d0bd-cea3-4e42-9d5f-778f666aa65a","Type":"ContainerDied","Data":"12afae38cbf0087fedac75025736e96a07a2d919bace3df6fb8a8bff614ccb4f"} Dec 10 13:14:23 crc kubenswrapper[4921]: I1210 13:14:23.370592 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-869f779d85-gbgn5" event={"ID":"9282d0bd-cea3-4e42-9d5f-778f666aa65a","Type":"ContainerStarted","Data":"eb99c6fab6077b00e443554c1d92e8a1311ce3bad5d8b459addfdfeda16b1569"} Dec 10 13:14:23 crc kubenswrapper[4921]: I1210 13:14:23.372728 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-f7c76d556-mfrfz" event={"ID":"7c3d07a5-ac22-4395-9027-78255ba114ca","Type":"ContainerStarted","Data":"27dee089db8054d40e127e19c6eb01354cf84deecb13d5b3c006979d90831c72"} Dec 10 13:14:23 crc kubenswrapper[4921]: I1210 13:14:23.372871 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-f7c76d556-mfrfz" event={"ID":"7c3d07a5-ac22-4395-9027-78255ba114ca","Type":"ContainerStarted","Data":"24ae530384db7f467e477a62b1177a7093b13a504dbd293a765cf23b5500fc43"} Dec 10 13:14:23 crc kubenswrapper[4921]: I1210 13:14:23.391446 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-565cd4b5db-xfcb6" podStartSLOduration=3.391425 podStartE2EDuration="3.391425s" podCreationTimestamp="2025-12-10 13:14:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 13:14:23.376639874 +0000 UTC m=+1060.592861818" watchObservedRunningTime="2025-12-10 13:14:23.391425 +0000 UTC m=+1060.607646934" Dec 10 13:14:23 crc kubenswrapper[4921]: I1210 13:14:23.431124 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-5f895c947d-lxwdl" podStartSLOduration=15.431106305 podStartE2EDuration="15.431106305s" podCreationTimestamp="2025-12-10 13:14:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 13:14:23.428017492 +0000 UTC m=+1060.644239426" 
watchObservedRunningTime="2025-12-10 13:14:23.431106305 +0000 UTC m=+1060.647328229" Dec 10 13:14:23 crc kubenswrapper[4921]: I1210 13:14:23.686021 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-766458cb5f-2g6ln"] Dec 10 13:14:23 crc kubenswrapper[4921]: I1210 13:14:23.687550 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-766458cb5f-2g6ln" Dec 10 13:14:23 crc kubenswrapper[4921]: I1210 13:14:23.698656 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-internal-svc" Dec 10 13:14:23 crc kubenswrapper[4921]: I1210 13:14:23.699056 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-public-svc" Dec 10 13:14:23 crc kubenswrapper[4921]: I1210 13:14:23.711779 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-766458cb5f-2g6ln"] Dec 10 13:14:23 crc kubenswrapper[4921]: I1210 13:14:23.791512 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/702f0832-8949-4724-b03d-2a97e46e421e-internal-tls-certs\") pod \"neutron-766458cb5f-2g6ln\" (UID: \"702f0832-8949-4724-b03d-2a97e46e421e\") " pod="openstack/neutron-766458cb5f-2g6ln" Dec 10 13:14:23 crc kubenswrapper[4921]: I1210 13:14:23.791784 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/702f0832-8949-4724-b03d-2a97e46e421e-public-tls-certs\") pod \"neutron-766458cb5f-2g6ln\" (UID: \"702f0832-8949-4724-b03d-2a97e46e421e\") " pod="openstack/neutron-766458cb5f-2g6ln" Dec 10 13:14:23 crc kubenswrapper[4921]: I1210 13:14:23.791918 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/702f0832-8949-4724-b03d-2a97e46e421e-config\") pod \"neutron-766458cb5f-2g6ln\" (UID: \"702f0832-8949-4724-b03d-2a97e46e421e\") " pod="openstack/neutron-766458cb5f-2g6ln" Dec 10 13:14:23 crc kubenswrapper[4921]: I1210 13:14:23.792065 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hdl99\" (UniqueName: \"kubernetes.io/projected/702f0832-8949-4724-b03d-2a97e46e421e-kube-api-access-hdl99\") pod \"neutron-766458cb5f-2g6ln\" (UID: \"702f0832-8949-4724-b03d-2a97e46e421e\") " pod="openstack/neutron-766458cb5f-2g6ln" Dec 10 13:14:23 crc kubenswrapper[4921]: I1210 13:14:23.792190 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/702f0832-8949-4724-b03d-2a97e46e421e-combined-ca-bundle\") pod \"neutron-766458cb5f-2g6ln\" (UID: \"702f0832-8949-4724-b03d-2a97e46e421e\") " pod="openstack/neutron-766458cb5f-2g6ln" Dec 10 13:14:23 crc kubenswrapper[4921]: I1210 13:14:23.792296 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/702f0832-8949-4724-b03d-2a97e46e421e-ovndb-tls-certs\") pod \"neutron-766458cb5f-2g6ln\" (UID: \"702f0832-8949-4724-b03d-2a97e46e421e\") " pod="openstack/neutron-766458cb5f-2g6ln" Dec 10 13:14:23 crc kubenswrapper[4921]: I1210 13:14:23.792427 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: 
\"kubernetes.io/secret/702f0832-8949-4724-b03d-2a97e46e421e-httpd-config\") pod \"neutron-766458cb5f-2g6ln\" (UID: \"702f0832-8949-4724-b03d-2a97e46e421e\") " pod="openstack/neutron-766458cb5f-2g6ln" Dec 10 13:14:23 crc kubenswrapper[4921]: I1210 13:14:23.893468 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/702f0832-8949-4724-b03d-2a97e46e421e-config\") pod \"neutron-766458cb5f-2g6ln\" (UID: \"702f0832-8949-4724-b03d-2a97e46e421e\") " pod="openstack/neutron-766458cb5f-2g6ln" Dec 10 13:14:23 crc kubenswrapper[4921]: I1210 13:14:23.893531 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hdl99\" (UniqueName: \"kubernetes.io/projected/702f0832-8949-4724-b03d-2a97e46e421e-kube-api-access-hdl99\") pod \"neutron-766458cb5f-2g6ln\" (UID: \"702f0832-8949-4724-b03d-2a97e46e421e\") " pod="openstack/neutron-766458cb5f-2g6ln" Dec 10 13:14:23 crc kubenswrapper[4921]: I1210 13:14:23.893566 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/702f0832-8949-4724-b03d-2a97e46e421e-combined-ca-bundle\") pod \"neutron-766458cb5f-2g6ln\" (UID: \"702f0832-8949-4724-b03d-2a97e46e421e\") " pod="openstack/neutron-766458cb5f-2g6ln" Dec 10 13:14:23 crc kubenswrapper[4921]: I1210 13:14:23.893583 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/702f0832-8949-4724-b03d-2a97e46e421e-ovndb-tls-certs\") pod \"neutron-766458cb5f-2g6ln\" (UID: \"702f0832-8949-4724-b03d-2a97e46e421e\") " pod="openstack/neutron-766458cb5f-2g6ln" Dec 10 13:14:23 crc kubenswrapper[4921]: I1210 13:14:23.893611 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/702f0832-8949-4724-b03d-2a97e46e421e-httpd-config\") pod \"neutron-766458cb5f-2g6ln\" (UID: \"702f0832-8949-4724-b03d-2a97e46e421e\") " pod="openstack/neutron-766458cb5f-2g6ln" Dec 10 13:14:23 crc kubenswrapper[4921]: I1210 13:14:23.893652 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/702f0832-8949-4724-b03d-2a97e46e421e-internal-tls-certs\") pod \"neutron-766458cb5f-2g6ln\" (UID: \"702f0832-8949-4724-b03d-2a97e46e421e\") " pod="openstack/neutron-766458cb5f-2g6ln" Dec 10 13:14:23 crc kubenswrapper[4921]: I1210 13:14:23.893715 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/702f0832-8949-4724-b03d-2a97e46e421e-public-tls-certs\") pod \"neutron-766458cb5f-2g6ln\" (UID: \"702f0832-8949-4724-b03d-2a97e46e421e\") " pod="openstack/neutron-766458cb5f-2g6ln" Dec 10 13:14:23 crc kubenswrapper[4921]: I1210 13:14:23.901074 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/702f0832-8949-4724-b03d-2a97e46e421e-ovndb-tls-certs\") pod \"neutron-766458cb5f-2g6ln\" (UID: \"702f0832-8949-4724-b03d-2a97e46e421e\") " pod="openstack/neutron-766458cb5f-2g6ln" Dec 10 13:14:23 crc kubenswrapper[4921]: I1210 13:14:23.905651 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/702f0832-8949-4724-b03d-2a97e46e421e-httpd-config\") pod \"neutron-766458cb5f-2g6ln\" (UID: 
\"702f0832-8949-4724-b03d-2a97e46e421e\") " pod="openstack/neutron-766458cb5f-2g6ln" Dec 10 13:14:23 crc kubenswrapper[4921]: I1210 13:14:23.906254 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/702f0832-8949-4724-b03d-2a97e46e421e-combined-ca-bundle\") pod \"neutron-766458cb5f-2g6ln\" (UID: \"702f0832-8949-4724-b03d-2a97e46e421e\") " pod="openstack/neutron-766458cb5f-2g6ln" Dec 10 13:14:23 crc kubenswrapper[4921]: I1210 13:14:23.906838 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/702f0832-8949-4724-b03d-2a97e46e421e-public-tls-certs\") pod \"neutron-766458cb5f-2g6ln\" (UID: \"702f0832-8949-4724-b03d-2a97e46e421e\") " pod="openstack/neutron-766458cb5f-2g6ln" Dec 10 13:14:23 crc kubenswrapper[4921]: I1210 13:14:23.912473 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/702f0832-8949-4724-b03d-2a97e46e421e-config\") pod \"neutron-766458cb5f-2g6ln\" (UID: \"702f0832-8949-4724-b03d-2a97e46e421e\") " pod="openstack/neutron-766458cb5f-2g6ln" Dec 10 13:14:23 crc kubenswrapper[4921]: I1210 13:14:23.924969 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hdl99\" (UniqueName: \"kubernetes.io/projected/702f0832-8949-4724-b03d-2a97e46e421e-kube-api-access-hdl99\") pod \"neutron-766458cb5f-2g6ln\" (UID: \"702f0832-8949-4724-b03d-2a97e46e421e\") " pod="openstack/neutron-766458cb5f-2g6ln" Dec 10 13:14:23 crc kubenswrapper[4921]: I1210 13:14:23.925278 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/702f0832-8949-4724-b03d-2a97e46e421e-internal-tls-certs\") pod \"neutron-766458cb5f-2g6ln\" (UID: \"702f0832-8949-4724-b03d-2a97e46e421e\") " pod="openstack/neutron-766458cb5f-2g6ln" Dec 10 13:14:24 crc kubenswrapper[4921]: I1210 13:14:24.009329 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-766458cb5f-2g6ln" Dec 10 13:14:24 crc kubenswrapper[4921]: I1210 13:14:24.397782 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-869f779d85-gbgn5" event={"ID":"9282d0bd-cea3-4e42-9d5f-778f666aa65a","Type":"ContainerStarted","Data":"45c0f4a624d35bb92ef11013632cf8f6e37292e2c5fdf886434e207c4744679e"} Dec 10 13:14:24 crc kubenswrapper[4921]: I1210 13:14:24.399868 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-869f779d85-gbgn5" Dec 10 13:14:24 crc kubenswrapper[4921]: I1210 13:14:24.406653 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-f7c76d556-mfrfz" event={"ID":"7c3d07a5-ac22-4395-9027-78255ba114ca","Type":"ContainerStarted","Data":"c97b1225f3bb2e6097ffaf57a4192666744d1adcbe1fea4d5c0815faff7025ff"} Dec 10 13:14:24 crc kubenswrapper[4921]: I1210 13:14:24.408154 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-f7c76d556-mfrfz" Dec 10 13:14:24 crc kubenswrapper[4921]: I1210 13:14:24.410807 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-54f74849c6-f5nfw" event={"ID":"a179cfa2-4a28-42ee-a55e-e95e808bb297","Type":"ContainerStarted","Data":"036636c1d617230b28e40e030a7e41e89c50b61e4384d75babb155c7310ebbeb"} Dec 10 13:14:24 crc kubenswrapper[4921]: I1210 13:14:24.418544 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-54f74849c6-f5nfw" Dec 10 13:14:24 crc kubenswrapper[4921]: I1210 13:14:24.420759 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-54f74849c6-f5nfw" Dec 10 13:14:24 crc kubenswrapper[4921]: I1210 13:14:24.439794 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-869f779d85-gbgn5" podStartSLOduration=3.439771295 podStartE2EDuration="3.439771295s" podCreationTimestamp="2025-12-10 13:14:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 13:14:24.419800789 +0000 UTC m=+1061.636022723" watchObservedRunningTime="2025-12-10 13:14:24.439771295 +0000 UTC m=+1061.655993219" Dec 10 13:14:24 crc kubenswrapper[4921]: I1210 13:14:24.450793 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-54f74849c6-f5nfw" podStartSLOduration=3.450774761 podStartE2EDuration="3.450774761s" podCreationTimestamp="2025-12-10 13:14:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 13:14:24.441955774 +0000 UTC m=+1061.658177698" watchObservedRunningTime="2025-12-10 13:14:24.450774761 +0000 UTC m=+1061.666996695" Dec 10 13:14:24 crc kubenswrapper[4921]: I1210 13:14:24.470963 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-f7c76d556-mfrfz" podStartSLOduration=3.470945002 podStartE2EDuration="3.470945002s" podCreationTimestamp="2025-12-10 13:14:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 13:14:24.465485686 +0000 UTC m=+1061.681707620" watchObservedRunningTime="2025-12-10 13:14:24.470945002 +0000 UTC m=+1061.687166926" Dec 10 13:14:25 crc kubenswrapper[4921]: I1210 13:14:25.228647 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack/neutron-766458cb5f-2g6ln"] Dec 10 13:14:25 crc kubenswrapper[4921]: W1210 13:14:25.826039 4921 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod702f0832_8949_4724_b03d_2a97e46e421e.slice/crio-a94ef47950747d14bd4b6641d17bffbf73ec4f7f2378de2c72dc8271e83cce95 WatchSource:0}: Error finding container a94ef47950747d14bd4b6641d17bffbf73ec4f7f2378de2c72dc8271e83cce95: Status 404 returned error can't find the container with id a94ef47950747d14bd4b6641d17bffbf73ec4f7f2378de2c72dc8271e83cce95 Dec 10 13:14:26 crc kubenswrapper[4921]: I1210 13:14:26.444117 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-5db748cdcf-qnswn" event={"ID":"1e30b467-3355-49fe-862d-9a79cdf0a35b","Type":"ContainerStarted","Data":"3629a614871004e842485c54697302c3edc26866efef99bd95ec2f4516d68534"} Dec 10 13:14:26 crc kubenswrapper[4921]: I1210 13:14:26.451051 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-766458cb5f-2g6ln" event={"ID":"702f0832-8949-4724-b03d-2a97e46e421e","Type":"ContainerStarted","Data":"4f680e2fed664341361b7e3d9373b525007279fe7f794f146ca13c788c4e5c75"} Dec 10 13:14:26 crc kubenswrapper[4921]: I1210 13:14:26.451097 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-766458cb5f-2g6ln" event={"ID":"702f0832-8949-4724-b03d-2a97e46e421e","Type":"ContainerStarted","Data":"a94ef47950747d14bd4b6641d17bffbf73ec4f7f2378de2c72dc8271e83cce95"} Dec 10 13:14:26 crc kubenswrapper[4921]: I1210 13:14:26.453742 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-5fdffd7d58-bx6fh" event={"ID":"6bad2725-64db-4468-b2c2-a1454f632ed9","Type":"ContainerStarted","Data":"9803dc7ab46679aadcc3f664bcc1ccb8d411d33554a337e36ba5b2730118f134"} Dec 10 13:14:27 crc kubenswrapper[4921]: I1210 13:14:27.268301 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-5659c44dfd-j5lr5"] Dec 10 13:14:27 crc kubenswrapper[4921]: I1210 13:14:27.271680 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-5659c44dfd-j5lr5" Dec 10 13:14:27 crc kubenswrapper[4921]: I1210 13:14:27.273561 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-internal-svc" Dec 10 13:14:27 crc kubenswrapper[4921]: I1210 13:14:27.273622 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-public-svc" Dec 10 13:14:27 crc kubenswrapper[4921]: I1210 13:14:27.356849 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/af6a0c63-c4e3-4b87-9800-60cc17fbb13d-config-data\") pod \"barbican-api-5659c44dfd-j5lr5\" (UID: \"af6a0c63-c4e3-4b87-9800-60cc17fbb13d\") " pod="openstack/barbican-api-5659c44dfd-j5lr5" Dec 10 13:14:27 crc kubenswrapper[4921]: I1210 13:14:27.356923 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/af6a0c63-c4e3-4b87-9800-60cc17fbb13d-public-tls-certs\") pod \"barbican-api-5659c44dfd-j5lr5\" (UID: \"af6a0c63-c4e3-4b87-9800-60cc17fbb13d\") " pod="openstack/barbican-api-5659c44dfd-j5lr5" Dec 10 13:14:27 crc kubenswrapper[4921]: I1210 13:14:27.357010 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/af6a0c63-c4e3-4b87-9800-60cc17fbb13d-internal-tls-certs\") pod \"barbican-api-5659c44dfd-j5lr5\" (UID: \"af6a0c63-c4e3-4b87-9800-60cc17fbb13d\") " pod="openstack/barbican-api-5659c44dfd-j5lr5" Dec 10 13:14:27 crc kubenswrapper[4921]: I1210 13:14:27.357033 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/af6a0c63-c4e3-4b87-9800-60cc17fbb13d-logs\") pod \"barbican-api-5659c44dfd-j5lr5\" (UID: \"af6a0c63-c4e3-4b87-9800-60cc17fbb13d\") " pod="openstack/barbican-api-5659c44dfd-j5lr5" Dec 10 13:14:27 crc kubenswrapper[4921]: I1210 13:14:27.357085 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/af6a0c63-c4e3-4b87-9800-60cc17fbb13d-config-data-custom\") pod \"barbican-api-5659c44dfd-j5lr5\" (UID: \"af6a0c63-c4e3-4b87-9800-60cc17fbb13d\") " pod="openstack/barbican-api-5659c44dfd-j5lr5" Dec 10 13:14:27 crc kubenswrapper[4921]: I1210 13:14:27.357111 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af6a0c63-c4e3-4b87-9800-60cc17fbb13d-combined-ca-bundle\") pod \"barbican-api-5659c44dfd-j5lr5\" (UID: \"af6a0c63-c4e3-4b87-9800-60cc17fbb13d\") " pod="openstack/barbican-api-5659c44dfd-j5lr5" Dec 10 13:14:27 crc kubenswrapper[4921]: I1210 13:14:27.357151 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ck79g\" (UniqueName: \"kubernetes.io/projected/af6a0c63-c4e3-4b87-9800-60cc17fbb13d-kube-api-access-ck79g\") pod \"barbican-api-5659c44dfd-j5lr5\" (UID: \"af6a0c63-c4e3-4b87-9800-60cc17fbb13d\") " pod="openstack/barbican-api-5659c44dfd-j5lr5" Dec 10 13:14:27 crc kubenswrapper[4921]: I1210 13:14:27.379379 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-5659c44dfd-j5lr5"] Dec 10 13:14:27 crc kubenswrapper[4921]: I1210 13:14:27.458527 4921 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/af6a0c63-c4e3-4b87-9800-60cc17fbb13d-config-data\") pod \"barbican-api-5659c44dfd-j5lr5\" (UID: \"af6a0c63-c4e3-4b87-9800-60cc17fbb13d\") " pod="openstack/barbican-api-5659c44dfd-j5lr5" Dec 10 13:14:27 crc kubenswrapper[4921]: I1210 13:14:27.458580 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/af6a0c63-c4e3-4b87-9800-60cc17fbb13d-public-tls-certs\") pod \"barbican-api-5659c44dfd-j5lr5\" (UID: \"af6a0c63-c4e3-4b87-9800-60cc17fbb13d\") " pod="openstack/barbican-api-5659c44dfd-j5lr5" Dec 10 13:14:27 crc kubenswrapper[4921]: I1210 13:14:27.458647 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/af6a0c63-c4e3-4b87-9800-60cc17fbb13d-internal-tls-certs\") pod \"barbican-api-5659c44dfd-j5lr5\" (UID: \"af6a0c63-c4e3-4b87-9800-60cc17fbb13d\") " pod="openstack/barbican-api-5659c44dfd-j5lr5" Dec 10 13:14:27 crc kubenswrapper[4921]: I1210 13:14:27.458667 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/af6a0c63-c4e3-4b87-9800-60cc17fbb13d-logs\") pod \"barbican-api-5659c44dfd-j5lr5\" (UID: \"af6a0c63-c4e3-4b87-9800-60cc17fbb13d\") " pod="openstack/barbican-api-5659c44dfd-j5lr5" Dec 10 13:14:27 crc kubenswrapper[4921]: I1210 13:14:27.458701 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/af6a0c63-c4e3-4b87-9800-60cc17fbb13d-config-data-custom\") pod \"barbican-api-5659c44dfd-j5lr5\" (UID: \"af6a0c63-c4e3-4b87-9800-60cc17fbb13d\") " pod="openstack/barbican-api-5659c44dfd-j5lr5" Dec 10 13:14:27 crc kubenswrapper[4921]: I1210 13:14:27.458727 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af6a0c63-c4e3-4b87-9800-60cc17fbb13d-combined-ca-bundle\") pod \"barbican-api-5659c44dfd-j5lr5\" (UID: \"af6a0c63-c4e3-4b87-9800-60cc17fbb13d\") " pod="openstack/barbican-api-5659c44dfd-j5lr5" Dec 10 13:14:27 crc kubenswrapper[4921]: I1210 13:14:27.458749 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ck79g\" (UniqueName: \"kubernetes.io/projected/af6a0c63-c4e3-4b87-9800-60cc17fbb13d-kube-api-access-ck79g\") pod \"barbican-api-5659c44dfd-j5lr5\" (UID: \"af6a0c63-c4e3-4b87-9800-60cc17fbb13d\") " pod="openstack/barbican-api-5659c44dfd-j5lr5" Dec 10 13:14:27 crc kubenswrapper[4921]: I1210 13:14:27.461489 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/af6a0c63-c4e3-4b87-9800-60cc17fbb13d-logs\") pod \"barbican-api-5659c44dfd-j5lr5\" (UID: \"af6a0c63-c4e3-4b87-9800-60cc17fbb13d\") " pod="openstack/barbican-api-5659c44dfd-j5lr5" Dec 10 13:14:27 crc kubenswrapper[4921]: I1210 13:14:27.469337 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/af6a0c63-c4e3-4b87-9800-60cc17fbb13d-config-data\") pod \"barbican-api-5659c44dfd-j5lr5\" (UID: \"af6a0c63-c4e3-4b87-9800-60cc17fbb13d\") " pod="openstack/barbican-api-5659c44dfd-j5lr5" Dec 10 13:14:27 crc kubenswrapper[4921]: I1210 13:14:27.470141 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: 
\"kubernetes.io/secret/af6a0c63-c4e3-4b87-9800-60cc17fbb13d-config-data-custom\") pod \"barbican-api-5659c44dfd-j5lr5\" (UID: \"af6a0c63-c4e3-4b87-9800-60cc17fbb13d\") " pod="openstack/barbican-api-5659c44dfd-j5lr5" Dec 10 13:14:27 crc kubenswrapper[4921]: I1210 13:14:27.476017 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/af6a0c63-c4e3-4b87-9800-60cc17fbb13d-internal-tls-certs\") pod \"barbican-api-5659c44dfd-j5lr5\" (UID: \"af6a0c63-c4e3-4b87-9800-60cc17fbb13d\") " pod="openstack/barbican-api-5659c44dfd-j5lr5" Dec 10 13:14:27 crc kubenswrapper[4921]: I1210 13:14:27.476477 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af6a0c63-c4e3-4b87-9800-60cc17fbb13d-combined-ca-bundle\") pod \"barbican-api-5659c44dfd-j5lr5\" (UID: \"af6a0c63-c4e3-4b87-9800-60cc17fbb13d\") " pod="openstack/barbican-api-5659c44dfd-j5lr5" Dec 10 13:14:27 crc kubenswrapper[4921]: I1210 13:14:27.478155 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/af6a0c63-c4e3-4b87-9800-60cc17fbb13d-public-tls-certs\") pod \"barbican-api-5659c44dfd-j5lr5\" (UID: \"af6a0c63-c4e3-4b87-9800-60cc17fbb13d\") " pod="openstack/barbican-api-5659c44dfd-j5lr5" Dec 10 13:14:27 crc kubenswrapper[4921]: I1210 13:14:27.479640 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ck79g\" (UniqueName: \"kubernetes.io/projected/af6a0c63-c4e3-4b87-9800-60cc17fbb13d-kube-api-access-ck79g\") pod \"barbican-api-5659c44dfd-j5lr5\" (UID: \"af6a0c63-c4e3-4b87-9800-60cc17fbb13d\") " pod="openstack/barbican-api-5659c44dfd-j5lr5" Dec 10 13:14:27 crc kubenswrapper[4921]: I1210 13:14:27.482209 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-5db748cdcf-qnswn" event={"ID":"1e30b467-3355-49fe-862d-9a79cdf0a35b","Type":"ContainerStarted","Data":"664c084faeea4a8d34ac8da38df9499be9414a09279d1a8680f974720ffff1dd"} Dec 10 13:14:27 crc kubenswrapper[4921]: I1210 13:14:27.490905 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-766458cb5f-2g6ln" event={"ID":"702f0832-8949-4724-b03d-2a97e46e421e","Type":"ContainerStarted","Data":"596f5148f5857af09734c89d5c56767fdf2a0a61ac339f441812d4c0524a0cac"} Dec 10 13:14:27 crc kubenswrapper[4921]: I1210 13:14:27.491363 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-766458cb5f-2g6ln" Dec 10 13:14:27 crc kubenswrapper[4921]: I1210 13:14:27.492676 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-5fdffd7d58-bx6fh" event={"ID":"6bad2725-64db-4468-b2c2-a1454f632ed9","Type":"ContainerStarted","Data":"066570ecd30d06e8bfd361c9beadf738d8245bcfdaa6f194dcfcd7d194fd4612"} Dec 10 13:14:27 crc kubenswrapper[4921]: I1210 13:14:27.513535 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-5db748cdcf-qnswn" podStartSLOduration=3.71000362 podStartE2EDuration="7.513517046s" podCreationTimestamp="2025-12-10 13:14:20 +0000 UTC" firstStartedPulling="2025-12-10 13:14:22.031213826 +0000 UTC m=+1059.247435750" lastFinishedPulling="2025-12-10 13:14:25.834727252 +0000 UTC m=+1063.050949176" observedRunningTime="2025-12-10 13:14:27.500366243 +0000 UTC m=+1064.716588177" watchObservedRunningTime="2025-12-10 13:14:27.513517046 +0000 UTC m=+1064.729738970" Dec 10 13:14:27 crc 
kubenswrapper[4921]: I1210 13:14:27.524950 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-5fdffd7d58-bx6fh" podStartSLOduration=3.875157152 podStartE2EDuration="7.524933032s" podCreationTimestamp="2025-12-10 13:14:20 +0000 UTC" firstStartedPulling="2025-12-10 13:14:22.295374545 +0000 UTC m=+1059.511596469" lastFinishedPulling="2025-12-10 13:14:25.945150425 +0000 UTC m=+1063.161372349" observedRunningTime="2025-12-10 13:14:27.521776217 +0000 UTC m=+1064.737998151" watchObservedRunningTime="2025-12-10 13:14:27.524933032 +0000 UTC m=+1064.741154956" Dec 10 13:14:27 crc kubenswrapper[4921]: I1210 13:14:27.552825 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-766458cb5f-2g6ln" podStartSLOduration=4.55280893 podStartE2EDuration="4.55280893s" podCreationTimestamp="2025-12-10 13:14:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 13:14:27.541916738 +0000 UTC m=+1064.758138672" watchObservedRunningTime="2025-12-10 13:14:27.55280893 +0000 UTC m=+1064.769030854" Dec 10 13:14:27 crc kubenswrapper[4921]: I1210 13:14:27.594783 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-5659c44dfd-j5lr5" Dec 10 13:14:31 crc kubenswrapper[4921]: I1210 13:14:31.628546 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-869f779d85-gbgn5" Dec 10 13:14:31 crc kubenswrapper[4921]: I1210 13:14:31.678171 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5b6dbdb6f5-5858x"] Dec 10 13:14:31 crc kubenswrapper[4921]: I1210 13:14:31.678681 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5b6dbdb6f5-5858x" podUID="ad9192c4-9b58-4292-a701-c6cb4de2c679" containerName="dnsmasq-dns" containerID="cri-o://7bea83f6d3222728775396936253df5d0c0cd00c2a63d440f4a22b7af83a60e7" gracePeriod=10 Dec 10 13:14:32 crc kubenswrapper[4921]: I1210 13:14:32.091086 4921 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-5b6dbdb6f5-5858x" podUID="ad9192c4-9b58-4292-a701-c6cb4de2c679" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.135:5353: connect: connection refused" Dec 10 13:14:32 crc kubenswrapper[4921]: I1210 13:14:32.535477 4921 generic.go:334] "Generic (PLEG): container finished" podID="ad9192c4-9b58-4292-a701-c6cb4de2c679" containerID="7bea83f6d3222728775396936253df5d0c0cd00c2a63d440f4a22b7af83a60e7" exitCode=0 Dec 10 13:14:32 crc kubenswrapper[4921]: I1210 13:14:32.535545 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b6dbdb6f5-5858x" event={"ID":"ad9192c4-9b58-4292-a701-c6cb4de2c679","Type":"ContainerDied","Data":"7bea83f6d3222728775396936253df5d0c0cd00c2a63d440f4a22b7af83a60e7"} Dec 10 13:14:33 crc kubenswrapper[4921]: I1210 13:14:33.178925 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-54f74849c6-f5nfw" Dec 10 13:14:33 crc kubenswrapper[4921]: I1210 13:14:33.362221 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-54f74849c6-f5nfw" Dec 10 13:14:34 crc kubenswrapper[4921]: I1210 13:14:34.003361 4921 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5b6dbdb6f5-5858x"
Dec 10 13:14:34 crc kubenswrapper[4921]: I1210 13:14:34.009658 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mtf2s\" (UniqueName: \"kubernetes.io/projected/ad9192c4-9b58-4292-a701-c6cb4de2c679-kube-api-access-mtf2s\") pod \"ad9192c4-9b58-4292-a701-c6cb4de2c679\" (UID: \"ad9192c4-9b58-4292-a701-c6cb4de2c679\") "
Dec 10 13:14:34 crc kubenswrapper[4921]: I1210 13:14:34.009700 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ad9192c4-9b58-4292-a701-c6cb4de2c679-ovsdbserver-sb\") pod \"ad9192c4-9b58-4292-a701-c6cb4de2c679\" (UID: \"ad9192c4-9b58-4292-a701-c6cb4de2c679\") "
Dec 10 13:14:34 crc kubenswrapper[4921]: I1210 13:14:34.009745 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ad9192c4-9b58-4292-a701-c6cb4de2c679-dns-svc\") pod \"ad9192c4-9b58-4292-a701-c6cb4de2c679\" (UID: \"ad9192c4-9b58-4292-a701-c6cb4de2c679\") "
Dec 10 13:14:34 crc kubenswrapper[4921]: I1210 13:14:34.009825 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ad9192c4-9b58-4292-a701-c6cb4de2c679-ovsdbserver-nb\") pod \"ad9192c4-9b58-4292-a701-c6cb4de2c679\" (UID: \"ad9192c4-9b58-4292-a701-c6cb4de2c679\") "
Dec 10 13:14:34 crc kubenswrapper[4921]: I1210 13:14:34.009848 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ad9192c4-9b58-4292-a701-c6cb4de2c679-config\") pod \"ad9192c4-9b58-4292-a701-c6cb4de2c679\" (UID: \"ad9192c4-9b58-4292-a701-c6cb4de2c679\") "
Dec 10 13:14:34 crc kubenswrapper[4921]: I1210 13:14:34.019123 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ad9192c4-9b58-4292-a701-c6cb4de2c679-kube-api-access-mtf2s" (OuterVolumeSpecName: "kube-api-access-mtf2s") pod "ad9192c4-9b58-4292-a701-c6cb4de2c679" (UID: "ad9192c4-9b58-4292-a701-c6cb4de2c679"). InnerVolumeSpecName "kube-api-access-mtf2s". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 10 13:14:34 crc kubenswrapper[4921]: I1210 13:14:34.111491 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mtf2s\" (UniqueName: \"kubernetes.io/projected/ad9192c4-9b58-4292-a701-c6cb4de2c679-kube-api-access-mtf2s\") on node \"crc\" DevicePath \"\""
Dec 10 13:14:34 crc kubenswrapper[4921]: I1210 13:14:34.148187 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ad9192c4-9b58-4292-a701-c6cb4de2c679-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "ad9192c4-9b58-4292-a701-c6cb4de2c679" (UID: "ad9192c4-9b58-4292-a701-c6cb4de2c679"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 10 13:14:34 crc kubenswrapper[4921]: I1210 13:14:34.216668 4921 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ad9192c4-9b58-4292-a701-c6cb4de2c679-dns-svc\") on node \"crc\" DevicePath \"\""
Dec 10 13:14:34 crc kubenswrapper[4921]: I1210 13:14:34.218126 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ad9192c4-9b58-4292-a701-c6cb4de2c679-config" (OuterVolumeSpecName: "config") pod "ad9192c4-9b58-4292-a701-c6cb4de2c679" (UID: "ad9192c4-9b58-4292-a701-c6cb4de2c679"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 10 13:14:34 crc kubenswrapper[4921]: I1210 13:14:34.267479 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ad9192c4-9b58-4292-a701-c6cb4de2c679-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "ad9192c4-9b58-4292-a701-c6cb4de2c679" (UID: "ad9192c4-9b58-4292-a701-c6cb4de2c679"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 10 13:14:34 crc kubenswrapper[4921]: I1210 13:14:34.306210 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ad9192c4-9b58-4292-a701-c6cb4de2c679-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "ad9192c4-9b58-4292-a701-c6cb4de2c679" (UID: "ad9192c4-9b58-4292-a701-c6cb4de2c679"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 10 13:14:34 crc kubenswrapper[4921]: I1210 13:14:34.322480 4921 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ad9192c4-9b58-4292-a701-c6cb4de2c679-config\") on node \"crc\" DevicePath \"\""
Dec 10 13:14:34 crc kubenswrapper[4921]: I1210 13:14:34.322545 4921 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ad9192c4-9b58-4292-a701-c6cb4de2c679-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Dec 10 13:14:34 crc kubenswrapper[4921]: I1210 13:14:34.322556 4921 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ad9192c4-9b58-4292-a701-c6cb4de2c679-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Dec 10 13:14:34 crc kubenswrapper[4921]: I1210 13:14:34.555244 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b6dbdb6f5-5858x" event={"ID":"ad9192c4-9b58-4292-a701-c6cb4de2c679","Type":"ContainerDied","Data":"37aafa4c60182d22eb874e01d4ca977576c221c87d1d86050d14ed2b4cdce157"}
Dec 10 13:14:34 crc kubenswrapper[4921]: I1210 13:14:34.555294 4921 scope.go:117] "RemoveContainer" containerID="7bea83f6d3222728775396936253df5d0c0cd00c2a63d440f4a22b7af83a60e7"
Dec 10 13:14:34 crc kubenswrapper[4921]: I1210 13:14:34.555490 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5b6dbdb6f5-5858x"
Dec 10 13:14:34 crc kubenswrapper[4921]: I1210 13:14:34.602427 4921 scope.go:117] "RemoveContainer" containerID="46287e25c5d16740585bb416ff7ecd1c756533f4c11a739ed26a143af11a435b"
Dec 10 13:14:34 crc kubenswrapper[4921]: I1210 13:14:34.606643 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-5659c44dfd-j5lr5"]
Dec 10 13:14:34 crc kubenswrapper[4921]: I1210 13:14:34.624511 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5b6dbdb6f5-5858x"]
Dec 10 13:14:34 crc kubenswrapper[4921]: I1210 13:14:34.634743 4921 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5b6dbdb6f5-5858x"]
Dec 10 13:14:35 crc kubenswrapper[4921]: I1210 13:14:35.204089 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ad9192c4-9b58-4292-a701-c6cb4de2c679" path="/var/lib/kubelet/pods/ad9192c4-9b58-4292-a701-c6cb4de2c679/volumes"
Dec 10 13:14:35 crc kubenswrapper[4921]: I1210 13:14:35.573568 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"bc458bae-c665-435a-ae77-3b7cb34146bb","Type":"ContainerStarted","Data":"7e85f1df6a2ddfb5bd28cb1b68431578e37740b5b2f8188e85372ea19dac3ecb"}
Dec 10 13:14:35 crc kubenswrapper[4921]: I1210 13:14:35.573704 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0"
Dec 10 13:14:35 crc kubenswrapper[4921]: I1210 13:14:35.573672 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="bc458bae-c665-435a-ae77-3b7cb34146bb" containerName="ceilometer-central-agent" containerID="cri-o://125289fd7175b7978e03316cc021d74bc2b3a57c6164c00d83e6a66b8fddefd3" gracePeriod=30
Dec 10 13:14:35 crc kubenswrapper[4921]: I1210 13:14:35.573740 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="bc458bae-c665-435a-ae77-3b7cb34146bb" containerName="sg-core" containerID="cri-o://cd2d856b0b56d9bfd2ab7c2ecee50a4edd3058fe27f6cf793b6601972e1eba4f" gracePeriod=30
Dec 10 13:14:35 crc kubenswrapper[4921]: I1210 13:14:35.573797 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="bc458bae-c665-435a-ae77-3b7cb34146bb" containerName="ceilometer-notification-agent" containerID="cri-o://ecd72cec7f362008bab6633085781d8ee98e47726d4bfbc7fd117b648233a9b0" gracePeriod=30
Dec 10 13:14:35 crc kubenswrapper[4921]: I1210 13:14:35.573894 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="bc458bae-c665-435a-ae77-3b7cb34146bb" containerName="proxy-httpd" containerID="cri-o://7e85f1df6a2ddfb5bd28cb1b68431578e37740b5b2f8188e85372ea19dac3ecb" gracePeriod=30
Dec 10 13:14:35 crc kubenswrapper[4921]: I1210 13:14:35.588133 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5659c44dfd-j5lr5" event={"ID":"af6a0c63-c4e3-4b87-9800-60cc17fbb13d","Type":"ContainerStarted","Data":"3912de56f8a0309b699415ac242deb49e5733d07708b8594a3bc13754792e1f1"}
Dec 10 13:14:35 crc kubenswrapper[4921]: I1210 13:14:35.588175 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5659c44dfd-j5lr5" event={"ID":"af6a0c63-c4e3-4b87-9800-60cc17fbb13d","Type":"ContainerStarted","Data":"f0747a332feef88e67caad302dc360cfe61111397310bcd09bed6c74a39bd1a0"}
Dec 10 13:14:35 crc kubenswrapper[4921]: I1210 13:14:35.588187 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5659c44dfd-j5lr5" event={"ID":"af6a0c63-c4e3-4b87-9800-60cc17fbb13d","Type":"ContainerStarted","Data":"53e280c39dd1d37021256ef7a8c8a1f8c6cb645b8cbe7a777110a5425ec6cca7"}
Dec 10 13:14:35 crc kubenswrapper[4921]: I1210 13:14:35.588324 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-5659c44dfd-j5lr5"
Dec 10 13:14:35 crc kubenswrapper[4921]: I1210 13:14:35.588374 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-5659c44dfd-j5lr5"
Dec 10 13:14:35 crc kubenswrapper[4921]: I1210 13:14:35.604930 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=3.055393655 podStartE2EDuration="49.604903017s" podCreationTimestamp="2025-12-10 13:13:46 +0000 UTC" firstStartedPulling="2025-12-10 13:13:47.886674289 +0000 UTC m=+1025.102896213" lastFinishedPulling="2025-12-10 13:14:34.436183651 +0000 UTC m=+1071.652405575" observedRunningTime="2025-12-10 13:14:35.598438803 +0000 UTC m=+1072.814660717" watchObservedRunningTime="2025-12-10 13:14:35.604903017 +0000 UTC m=+1072.821124951"
Dec 10 13:14:35 crc kubenswrapper[4921]: I1210 13:14:35.629075 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-5659c44dfd-j5lr5" podStartSLOduration=8.629045895 podStartE2EDuration="8.629045895s" podCreationTimestamp="2025-12-10 13:14:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 13:14:35.618186473 +0000 UTC m=+1072.834408437" watchObservedRunningTime="2025-12-10 13:14:35.629045895 +0000 UTC m=+1072.845267859"
Dec 10 13:14:36 crc kubenswrapper[4921]: I1210 13:14:36.615064 4921 generic.go:334] "Generic (PLEG): container finished" podID="bc458bae-c665-435a-ae77-3b7cb34146bb" containerID="7e85f1df6a2ddfb5bd28cb1b68431578e37740b5b2f8188e85372ea19dac3ecb" exitCode=0
Dec 10 13:14:36 crc kubenswrapper[4921]: I1210 13:14:36.615592 4921 generic.go:334] "Generic (PLEG): container finished" podID="bc458bae-c665-435a-ae77-3b7cb34146bb" containerID="cd2d856b0b56d9bfd2ab7c2ecee50a4edd3058fe27f6cf793b6601972e1eba4f" exitCode=2
Dec 10 13:14:36 crc kubenswrapper[4921]: I1210 13:14:36.615611 4921 generic.go:334] "Generic (PLEG): container finished" podID="bc458bae-c665-435a-ae77-3b7cb34146bb" containerID="125289fd7175b7978e03316cc021d74bc2b3a57c6164c00d83e6a66b8fddefd3" exitCode=0
Dec 10 13:14:36 crc kubenswrapper[4921]: I1210 13:14:36.615279 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"bc458bae-c665-435a-ae77-3b7cb34146bb","Type":"ContainerDied","Data":"7e85f1df6a2ddfb5bd28cb1b68431578e37740b5b2f8188e85372ea19dac3ecb"}
Dec 10 13:14:36 crc kubenswrapper[4921]: I1210 13:14:36.615984 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"bc458bae-c665-435a-ae77-3b7cb34146bb","Type":"ContainerDied","Data":"cd2d856b0b56d9bfd2ab7c2ecee50a4edd3058fe27f6cf793b6601972e1eba4f"}
Dec 10 13:14:36 crc kubenswrapper[4921]: I1210 13:14:36.616030 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"bc458bae-c665-435a-ae77-3b7cb34146bb","Type":"ContainerDied","Data":"125289fd7175b7978e03316cc021d74bc2b3a57c6164c00d83e6a66b8fddefd3"}
Dec 10 13:14:37 crc kubenswrapper[4921]: I1210 13:14:37.121871 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 10 13:14:37 crc kubenswrapper[4921]: I1210 13:14:37.280252 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bc458bae-c665-435a-ae77-3b7cb34146bb-run-httpd\") pod \"bc458bae-c665-435a-ae77-3b7cb34146bb\" (UID: \"bc458bae-c665-435a-ae77-3b7cb34146bb\") "
Dec 10 13:14:37 crc kubenswrapper[4921]: I1210 13:14:37.280837 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bc458bae-c665-435a-ae77-3b7cb34146bb-log-httpd\") pod \"bc458bae-c665-435a-ae77-3b7cb34146bb\" (UID: \"bc458bae-c665-435a-ae77-3b7cb34146bb\") "
Dec 10 13:14:37 crc kubenswrapper[4921]: I1210 13:14:37.280957 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bc458bae-c665-435a-ae77-3b7cb34146bb-config-data\") pod \"bc458bae-c665-435a-ae77-3b7cb34146bb\" (UID: \"bc458bae-c665-435a-ae77-3b7cb34146bb\") "
Dec 10 13:14:37 crc kubenswrapper[4921]: I1210 13:14:37.281069 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bc458bae-c665-435a-ae77-3b7cb34146bb-combined-ca-bundle\") pod \"bc458bae-c665-435a-ae77-3b7cb34146bb\" (UID: \"bc458bae-c665-435a-ae77-3b7cb34146bb\") "
Dec 10 13:14:37 crc kubenswrapper[4921]: I1210 13:14:37.281095 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/bc458bae-c665-435a-ae77-3b7cb34146bb-sg-core-conf-yaml\") pod \"bc458bae-c665-435a-ae77-3b7cb34146bb\" (UID: \"bc458bae-c665-435a-ae77-3b7cb34146bb\") "
Dec 10 13:14:37 crc kubenswrapper[4921]: I1210 13:14:37.281119 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8jrch\" (UniqueName: \"kubernetes.io/projected/bc458bae-c665-435a-ae77-3b7cb34146bb-kube-api-access-8jrch\") pod \"bc458bae-c665-435a-ae77-3b7cb34146bb\" (UID: \"bc458bae-c665-435a-ae77-3b7cb34146bb\") "
Dec 10 13:14:37 crc kubenswrapper[4921]: I1210 13:14:37.281158 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bc458bae-c665-435a-ae77-3b7cb34146bb-scripts\") pod \"bc458bae-c665-435a-ae77-3b7cb34146bb\" (UID: \"bc458bae-c665-435a-ae77-3b7cb34146bb\") "
Dec 10 13:14:37 crc kubenswrapper[4921]: I1210 13:14:37.281704 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc458bae-c665-435a-ae77-3b7cb34146bb-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "bc458bae-c665-435a-ae77-3b7cb34146bb" (UID: "bc458bae-c665-435a-ae77-3b7cb34146bb"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 10 13:14:37 crc kubenswrapper[4921]: I1210 13:14:37.281954 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc458bae-c665-435a-ae77-3b7cb34146bb-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "bc458bae-c665-435a-ae77-3b7cb34146bb" (UID: "bc458bae-c665-435a-ae77-3b7cb34146bb"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 10 13:14:37 crc kubenswrapper[4921]: I1210 13:14:37.287766 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc458bae-c665-435a-ae77-3b7cb34146bb-kube-api-access-8jrch" (OuterVolumeSpecName: "kube-api-access-8jrch") pod "bc458bae-c665-435a-ae77-3b7cb34146bb" (UID: "bc458bae-c665-435a-ae77-3b7cb34146bb"). InnerVolumeSpecName "kube-api-access-8jrch". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 10 13:14:37 crc kubenswrapper[4921]: I1210 13:14:37.288750 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc458bae-c665-435a-ae77-3b7cb34146bb-scripts" (OuterVolumeSpecName: "scripts") pod "bc458bae-c665-435a-ae77-3b7cb34146bb" (UID: "bc458bae-c665-435a-ae77-3b7cb34146bb"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 10 13:14:37 crc kubenswrapper[4921]: I1210 13:14:37.322541 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc458bae-c665-435a-ae77-3b7cb34146bb-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "bc458bae-c665-435a-ae77-3b7cb34146bb" (UID: "bc458bae-c665-435a-ae77-3b7cb34146bb"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 10 13:14:37 crc kubenswrapper[4921]: I1210 13:14:37.358245 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc458bae-c665-435a-ae77-3b7cb34146bb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "bc458bae-c665-435a-ae77-3b7cb34146bb" (UID: "bc458bae-c665-435a-ae77-3b7cb34146bb"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 10 13:14:37 crc kubenswrapper[4921]: I1210 13:14:37.382273 4921 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bc458bae-c665-435a-ae77-3b7cb34146bb-run-httpd\") on node \"crc\" DevicePath \"\""
Dec 10 13:14:37 crc kubenswrapper[4921]: I1210 13:14:37.382317 4921 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bc458bae-c665-435a-ae77-3b7cb34146bb-log-httpd\") on node \"crc\" DevicePath \"\""
Dec 10 13:14:37 crc kubenswrapper[4921]: I1210 13:14:37.382329 4921 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bc458bae-c665-435a-ae77-3b7cb34146bb-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 10 13:14:37 crc kubenswrapper[4921]: I1210 13:14:37.382344 4921 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/bc458bae-c665-435a-ae77-3b7cb34146bb-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\""
Dec 10 13:14:37 crc kubenswrapper[4921]: I1210 13:14:37.382354 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8jrch\" (UniqueName: \"kubernetes.io/projected/bc458bae-c665-435a-ae77-3b7cb34146bb-kube-api-access-8jrch\") on node \"crc\" DevicePath \"\""
Dec 10 13:14:37 crc kubenswrapper[4921]: I1210 13:14:37.382363 4921 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bc458bae-c665-435a-ae77-3b7cb34146bb-scripts\") on node \"crc\" DevicePath \"\""
Dec 10 13:14:37 crc kubenswrapper[4921]: I1210 13:14:37.388276 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc458bae-c665-435a-ae77-3b7cb34146bb-config-data" (OuterVolumeSpecName: "config-data") pod "bc458bae-c665-435a-ae77-3b7cb34146bb" (UID: "bc458bae-c665-435a-ae77-3b7cb34146bb"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 10 13:14:37 crc kubenswrapper[4921]: I1210 13:14:37.483362 4921 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bc458bae-c665-435a-ae77-3b7cb34146bb-config-data\") on node \"crc\" DevicePath \"\""
Dec 10 13:14:37 crc kubenswrapper[4921]: I1210 13:14:37.628884 4921 generic.go:334] "Generic (PLEG): container finished" podID="bc458bae-c665-435a-ae77-3b7cb34146bb" containerID="ecd72cec7f362008bab6633085781d8ee98e47726d4bfbc7fd117b648233a9b0" exitCode=0
Dec 10 13:14:37 crc kubenswrapper[4921]: I1210 13:14:37.628944 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"bc458bae-c665-435a-ae77-3b7cb34146bb","Type":"ContainerDied","Data":"ecd72cec7f362008bab6633085781d8ee98e47726d4bfbc7fd117b648233a9b0"}
Dec 10 13:14:37 crc kubenswrapper[4921]: I1210 13:14:37.628965 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 10 13:14:37 crc kubenswrapper[4921]: I1210 13:14:37.628982 4921 scope.go:117] "RemoveContainer" containerID="7e85f1df6a2ddfb5bd28cb1b68431578e37740b5b2f8188e85372ea19dac3ecb"
Dec 10 13:14:37 crc kubenswrapper[4921]: I1210 13:14:37.628971 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"bc458bae-c665-435a-ae77-3b7cb34146bb","Type":"ContainerDied","Data":"0f049ab65e76d724f2244d86e28a9228015a82a676aaed4e6e336cc6ae61a9c3"}
Dec 10 13:14:37 crc kubenswrapper[4921]: I1210 13:14:37.630932 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-nqvc7" event={"ID":"049815fe-e8f8-45c6-9360-d2d331fa8cd3","Type":"ContainerStarted","Data":"2fca88647a2c15e9928330cc72846624e630fe22cad55c0a5e782e7586e241b8"}
Dec 10 13:14:37 crc kubenswrapper[4921]: I1210 13:14:37.652303 4921 scope.go:117] "RemoveContainer" containerID="cd2d856b0b56d9bfd2ab7c2ecee50a4edd3058fe27f6cf793b6601972e1eba4f"
Dec 10 13:14:37 crc kubenswrapper[4921]: I1210 13:14:37.681808 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-nqvc7" podStartSLOduration=2.576654586 podStartE2EDuration="51.681786544s" podCreationTimestamp="2025-12-10 13:13:46 +0000 UTC" firstStartedPulling="2025-12-10 13:13:47.575728134 +0000 UTC m=+1024.791950058" lastFinishedPulling="2025-12-10 13:14:36.680860082 +0000 UTC m=+1073.897082016" observedRunningTime="2025-12-10 13:14:37.658755186 +0000 UTC m=+1074.874977130" watchObservedRunningTime="2025-12-10 13:14:37.681786544 +0000 UTC m=+1074.898008468"
Dec 10 13:14:37 crc kubenswrapper[4921]: I1210 13:14:37.695455 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Dec 10 13:14:37 crc kubenswrapper[4921]: I1210 13:14:37.708141 4921 scope.go:117] "RemoveContainer" containerID="ecd72cec7f362008bab6633085781d8ee98e47726d4bfbc7fd117b648233a9b0"
Dec 10 13:14:37 crc kubenswrapper[4921]: I1210 13:14:37.720459 4921 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"]
Dec 10 13:14:37 crc kubenswrapper[4921]: I1210 13:14:37.729922 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"]
Dec 10 13:14:37 crc kubenswrapper[4921]: E1210 13:14:37.730355 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bc458bae-c665-435a-ae77-3b7cb34146bb" containerName="ceilometer-notification-agent"
Dec 10 13:14:37 crc kubenswrapper[4921]: I1210 13:14:37.730372 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="bc458bae-c665-435a-ae77-3b7cb34146bb" containerName="ceilometer-notification-agent"
Dec 10 13:14:37 crc kubenswrapper[4921]: E1210 13:14:37.730395 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bc458bae-c665-435a-ae77-3b7cb34146bb" containerName="proxy-httpd"
Dec 10 13:14:37 crc kubenswrapper[4921]: I1210 13:14:37.730402 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="bc458bae-c665-435a-ae77-3b7cb34146bb" containerName="proxy-httpd"
Dec 10 13:14:37 crc kubenswrapper[4921]: E1210 13:14:37.730416 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bc458bae-c665-435a-ae77-3b7cb34146bb" containerName="sg-core"
Dec 10 13:14:37 crc kubenswrapper[4921]: I1210 13:14:37.730422 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="bc458bae-c665-435a-ae77-3b7cb34146bb" containerName="sg-core"
Dec 10 13:14:37 crc kubenswrapper[4921]: E1210 13:14:37.730432 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad9192c4-9b58-4292-a701-c6cb4de2c679" containerName="init"
Dec 10 13:14:37 crc kubenswrapper[4921]: I1210 13:14:37.730437 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad9192c4-9b58-4292-a701-c6cb4de2c679" containerName="init"
Dec 10 13:14:37 crc kubenswrapper[4921]: E1210 13:14:37.730448 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bc458bae-c665-435a-ae77-3b7cb34146bb" containerName="ceilometer-central-agent"
Dec 10 13:14:37 crc kubenswrapper[4921]: I1210 13:14:37.730454 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="bc458bae-c665-435a-ae77-3b7cb34146bb" containerName="ceilometer-central-agent"
Dec 10 13:14:37 crc kubenswrapper[4921]: E1210 13:14:37.730469 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad9192c4-9b58-4292-a701-c6cb4de2c679" containerName="dnsmasq-dns"
Dec 10 13:14:37 crc kubenswrapper[4921]: I1210 13:14:37.730474 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad9192c4-9b58-4292-a701-c6cb4de2c679" containerName="dnsmasq-dns"
Dec 10 13:14:37 crc kubenswrapper[4921]: I1210 13:14:37.730650 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="bc458bae-c665-435a-ae77-3b7cb34146bb" containerName="ceilometer-notification-agent"
Dec 10 13:14:37 crc kubenswrapper[4921]: I1210 13:14:37.730669 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="bc458bae-c665-435a-ae77-3b7cb34146bb" containerName="proxy-httpd"
Dec 10 13:14:37 crc kubenswrapper[4921]: I1210 13:14:37.730685 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="bc458bae-c665-435a-ae77-3b7cb34146bb" containerName="ceilometer-central-agent"
Dec 10 13:14:37 crc kubenswrapper[4921]: I1210 13:14:37.730698 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="bc458bae-c665-435a-ae77-3b7cb34146bb" containerName="sg-core"
Dec 10 13:14:37 crc kubenswrapper[4921]: I1210 13:14:37.730713 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="ad9192c4-9b58-4292-a701-c6cb4de2c679" containerName="dnsmasq-dns"
Dec 10 13:14:37 crc kubenswrapper[4921]: I1210 13:14:37.732172 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 10 13:14:37 crc kubenswrapper[4921]: I1210 13:14:37.745927 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts"
Dec 10 13:14:37 crc kubenswrapper[4921]: I1210 13:14:37.746193 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Dec 10 13:14:37 crc kubenswrapper[4921]: I1210 13:14:37.757456 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Dec 10 13:14:37 crc kubenswrapper[4921]: I1210 13:14:37.774558 4921 scope.go:117] "RemoveContainer" containerID="125289fd7175b7978e03316cc021d74bc2b3a57c6164c00d83e6a66b8fddefd3"
Dec 10 13:14:37 crc kubenswrapper[4921]: I1210 13:14:37.787278 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3-run-httpd\") pod \"ceilometer-0\" (UID: \"042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3\") " pod="openstack/ceilometer-0"
Dec 10 13:14:37 crc kubenswrapper[4921]: I1210 13:14:37.787317 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3\") " pod="openstack/ceilometer-0"
Dec 10 13:14:37 crc kubenswrapper[4921]: I1210 13:14:37.787341 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3\") " pod="openstack/ceilometer-0"
Dec 10 13:14:37 crc kubenswrapper[4921]: I1210 13:14:37.787416 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3-log-httpd\") pod \"ceilometer-0\" (UID: \"042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3\") " pod="openstack/ceilometer-0"
Dec 10 13:14:37 crc kubenswrapper[4921]: I1210 13:14:37.787438 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3-scripts\") pod \"ceilometer-0\" (UID: \"042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3\") " pod="openstack/ceilometer-0"
Dec 10 13:14:37 crc kubenswrapper[4921]: I1210 13:14:37.787457 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3-config-data\") pod \"ceilometer-0\" (UID: \"042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3\") " pod="openstack/ceilometer-0"
Dec 10 13:14:37 crc kubenswrapper[4921]: I1210 13:14:37.787477 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4xmd5\" (UniqueName: \"kubernetes.io/projected/042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3-kube-api-access-4xmd5\") pod \"ceilometer-0\" (UID: \"042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3\") " pod="openstack/ceilometer-0"
Dec 10 13:14:37 crc kubenswrapper[4921]: I1210 13:14:37.804022 4921 scope.go:117] "RemoveContainer" containerID="7e85f1df6a2ddfb5bd28cb1b68431578e37740b5b2f8188e85372ea19dac3ecb"
Dec 10 13:14:37 crc kubenswrapper[4921]: E1210 13:14:37.804549 4921 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7e85f1df6a2ddfb5bd28cb1b68431578e37740b5b2f8188e85372ea19dac3ecb\": container with ID starting with 7e85f1df6a2ddfb5bd28cb1b68431578e37740b5b2f8188e85372ea19dac3ecb not found: ID does not exist" containerID="7e85f1df6a2ddfb5bd28cb1b68431578e37740b5b2f8188e85372ea19dac3ecb"
Dec 10 13:14:37 crc kubenswrapper[4921]: I1210 13:14:37.804597 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7e85f1df6a2ddfb5bd28cb1b68431578e37740b5b2f8188e85372ea19dac3ecb"} err="failed to get container status \"7e85f1df6a2ddfb5bd28cb1b68431578e37740b5b2f8188e85372ea19dac3ecb\": rpc error: code = NotFound desc = could not find container \"7e85f1df6a2ddfb5bd28cb1b68431578e37740b5b2f8188e85372ea19dac3ecb\": container with ID starting with 7e85f1df6a2ddfb5bd28cb1b68431578e37740b5b2f8188e85372ea19dac3ecb not found: ID does not exist"
Dec 10 13:14:37 crc kubenswrapper[4921]: I1210 13:14:37.804628 4921 scope.go:117] "RemoveContainer" containerID="cd2d856b0b56d9bfd2ab7c2ecee50a4edd3058fe27f6cf793b6601972e1eba4f"
Dec 10 13:14:37 crc kubenswrapper[4921]: E1210 13:14:37.804850 4921 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cd2d856b0b56d9bfd2ab7c2ecee50a4edd3058fe27f6cf793b6601972e1eba4f\": container with ID starting with cd2d856b0b56d9bfd2ab7c2ecee50a4edd3058fe27f6cf793b6601972e1eba4f not found: ID does not exist" containerID="cd2d856b0b56d9bfd2ab7c2ecee50a4edd3058fe27f6cf793b6601972e1eba4f"
Dec 10 13:14:37 crc kubenswrapper[4921]: I1210 13:14:37.804876 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cd2d856b0b56d9bfd2ab7c2ecee50a4edd3058fe27f6cf793b6601972e1eba4f"} err="failed to get container status \"cd2d856b0b56d9bfd2ab7c2ecee50a4edd3058fe27f6cf793b6601972e1eba4f\": rpc error: code = NotFound desc = could not find container \"cd2d856b0b56d9bfd2ab7c2ecee50a4edd3058fe27f6cf793b6601972e1eba4f\": container with ID starting with cd2d856b0b56d9bfd2ab7c2ecee50a4edd3058fe27f6cf793b6601972e1eba4f not found: ID does not exist"
Dec 10 13:14:37 crc kubenswrapper[4921]: I1210 13:14:37.804889 4921 scope.go:117] "RemoveContainer" containerID="ecd72cec7f362008bab6633085781d8ee98e47726d4bfbc7fd117b648233a9b0"
Dec 10 13:14:37 crc kubenswrapper[4921]: E1210 13:14:37.805567 4921 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ecd72cec7f362008bab6633085781d8ee98e47726d4bfbc7fd117b648233a9b0\": container with ID starting with ecd72cec7f362008bab6633085781d8ee98e47726d4bfbc7fd117b648233a9b0 not found: ID does not exist" containerID="ecd72cec7f362008bab6633085781d8ee98e47726d4bfbc7fd117b648233a9b0"
Dec 10 13:14:37 crc kubenswrapper[4921]: I1210 13:14:37.805590 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ecd72cec7f362008bab6633085781d8ee98e47726d4bfbc7fd117b648233a9b0"} err="failed to get container status \"ecd72cec7f362008bab6633085781d8ee98e47726d4bfbc7fd117b648233a9b0\": rpc error: code = NotFound desc = could not find container \"ecd72cec7f362008bab6633085781d8ee98e47726d4bfbc7fd117b648233a9b0\": container with ID starting with ecd72cec7f362008bab6633085781d8ee98e47726d4bfbc7fd117b648233a9b0 not found: ID does not exist"
Dec 10 13:14:37 crc kubenswrapper[4921]: I1210 13:14:37.805602 4921 scope.go:117] "RemoveContainer" containerID="125289fd7175b7978e03316cc021d74bc2b3a57c6164c00d83e6a66b8fddefd3"
Dec 10 13:14:37 crc kubenswrapper[4921]: E1210 13:14:37.805811 4921 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"125289fd7175b7978e03316cc021d74bc2b3a57c6164c00d83e6a66b8fddefd3\": container with ID starting with 125289fd7175b7978e03316cc021d74bc2b3a57c6164c00d83e6a66b8fddefd3 not found: ID does not exist" containerID="125289fd7175b7978e03316cc021d74bc2b3a57c6164c00d83e6a66b8fddefd3"
Dec 10 13:14:37 crc kubenswrapper[4921]: I1210 13:14:37.805834 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"125289fd7175b7978e03316cc021d74bc2b3a57c6164c00d83e6a66b8fddefd3"} err="failed to get container status \"125289fd7175b7978e03316cc021d74bc2b3a57c6164c00d83e6a66b8fddefd3\": rpc error: code = NotFound desc = could not find container \"125289fd7175b7978e03316cc021d74bc2b3a57c6164c00d83e6a66b8fddefd3\": container with ID starting with 125289fd7175b7978e03316cc021d74bc2b3a57c6164c00d83e6a66b8fddefd3 not found: ID does not exist"
Dec 10 13:14:37 crc kubenswrapper[4921]: I1210 13:14:37.888752 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3-log-httpd\") pod \"ceilometer-0\" (UID: \"042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3\") " pod="openstack/ceilometer-0"
Dec 10 13:14:37 crc kubenswrapper[4921]: I1210 13:14:37.888811 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3-scripts\") pod \"ceilometer-0\" (UID: \"042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3\") " pod="openstack/ceilometer-0"
Dec 10 13:14:37 crc kubenswrapper[4921]: I1210 13:14:37.888835 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3-config-data\") pod \"ceilometer-0\" (UID: \"042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3\") " pod="openstack/ceilometer-0"
Dec 10 13:14:37 crc kubenswrapper[4921]: I1210 13:14:37.888857 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4xmd5\" (UniqueName: \"kubernetes.io/projected/042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3-kube-api-access-4xmd5\") pod \"ceilometer-0\" (UID: \"042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3\") " pod="openstack/ceilometer-0"
Dec 10 13:14:37 crc kubenswrapper[4921]: I1210 13:14:37.888918 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3-run-httpd\") pod \"ceilometer-0\" (UID: \"042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3\") " pod="openstack/ceilometer-0"
Dec 10 13:14:37 crc kubenswrapper[4921]: I1210 13:14:37.888933 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3\") " pod="openstack/ceilometer-0"
Dec 10 13:14:37 crc kubenswrapper[4921]: I1210 13:14:37.888952 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3\") " pod="openstack/ceilometer-0"
Dec 10 13:14:37 crc kubenswrapper[4921]: I1210 13:14:37.890412 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3-log-httpd\") pod \"ceilometer-0\" (UID: \"042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3\") " pod="openstack/ceilometer-0"
Dec 10 13:14:37 crc kubenswrapper[4921]: I1210 13:14:37.890820 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3-run-httpd\") pod \"ceilometer-0\" (UID: \"042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3\") " pod="openstack/ceilometer-0"
Dec 10 13:14:37 crc kubenswrapper[4921]: I1210 13:14:37.895005 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3-scripts\") pod \"ceilometer-0\" (UID: \"042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3\") " pod="openstack/ceilometer-0"
Dec 10 13:14:37 crc kubenswrapper[4921]: I1210 13:14:37.896278 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3\") " pod="openstack/ceilometer-0"
Dec 10 13:14:37 crc kubenswrapper[4921]: I1210 13:14:37.896731 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3-config-data\") pod \"ceilometer-0\" (UID: \"042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3\") " pod="openstack/ceilometer-0"
Dec 10 13:14:37 crc kubenswrapper[4921]: I1210 13:14:37.910295 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3\") " pod="openstack/ceilometer-0"
Dec 10 13:14:37 crc kubenswrapper[4921]: I1210 13:14:37.913018 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4xmd5\" (UniqueName: \"kubernetes.io/projected/042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3-kube-api-access-4xmd5\") pod \"ceilometer-0\" (UID: \"042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3\") " pod="openstack/ceilometer-0"
Dec 10 13:14:38 crc kubenswrapper[4921]: I1210 13:14:38.061865 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 10 13:14:38 crc kubenswrapper[4921]: I1210 13:14:38.546936 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Dec 10 13:14:38 crc kubenswrapper[4921]: W1210 13:14:38.550107 4921 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod042f6d1a_e1d3_47ff_b2a5_b5c34b5063f3.slice/crio-27067527f96a61d8817fbb9e4f02181d6429092cb85d73c822dda45571282237 WatchSource:0}: Error finding container 27067527f96a61d8817fbb9e4f02181d6429092cb85d73c822dda45571282237: Status 404 returned error can't find the container with id 27067527f96a61d8817fbb9e4f02181d6429092cb85d73c822dda45571282237
Dec 10 13:14:38 crc kubenswrapper[4921]: I1210 13:14:38.651006 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3","Type":"ContainerStarted","Data":"27067527f96a61d8817fbb9e4f02181d6429092cb85d73c822dda45571282237"}
Dec 10 13:14:39 crc kubenswrapper[4921]: I1210 13:14:39.226931 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc458bae-c665-435a-ae77-3b7cb34146bb" path="/var/lib/kubelet/pods/bc458bae-c665-435a-ae77-3b7cb34146bb/volumes"
Dec 10 13:14:39 crc kubenswrapper[4921]: I1210 13:14:39.664295 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3","Type":"ContainerStarted","Data":"006a32c358d67b2a512f882097b0e99c53f37ba3797bbb62383de910c679d1cb"}
Dec 10 13:14:40 crc kubenswrapper[4921]: I1210 13:14:40.675382 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3","Type":"ContainerStarted","Data":"02c830af213e1994e4cfd05099da4461a5e2e41437e742d546c59521e54603e9"}
Dec 10 13:14:40 crc kubenswrapper[4921]: I1210 13:14:40.771166 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-5f895c947d-lxwdl"
Dec 10 13:14:40 crc kubenswrapper[4921]: I1210 13:14:40.776179 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-5f895c947d-lxwdl"
Dec 10 13:14:41 crc kubenswrapper[4921]: I1210 13:14:41.725755 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3","Type":"ContainerStarted","Data":"494d73178db963ba95bdf08818a51444e915c8afffa6d8ce10c3493d7ad9d1b6"}
Dec 10 13:14:43 crc kubenswrapper[4921]: I1210 13:14:43.741842 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3","Type":"ContainerStarted","Data":"fb206cf0501cc82c3e2ffaff2b457c7b4f872a14a253f62f7f5a083ca621d141"}
Dec 10 13:14:43 crc kubenswrapper[4921]: I1210 13:14:43.743163 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0"
Dec 10 13:14:43 crc kubenswrapper[4921]: I1210 13:14:43.769252 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.556355242 podStartE2EDuration="6.769235824s" podCreationTimestamp="2025-12-10 13:14:37 +0000 UTC" firstStartedPulling="2025-12-10 13:14:38.552561834 +0000 UTC m=+1075.768783758" lastFinishedPulling="2025-12-10 13:14:42.765442416 +0000 UTC m=+1079.981664340" observedRunningTime="2025-12-10 13:14:43.760655394 +0000 UTC m=+1080.976877318" watchObservedRunningTime="2025-12-10 13:14:43.769235824 +0000 UTC m=+1080.985457748"
Dec 10 13:14:44 crc kubenswrapper[4921]: I1210 13:14:44.379536 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-5659c44dfd-j5lr5"
Dec 10 13:14:44 crc kubenswrapper[4921]: I1210 13:14:44.513993 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-5659c44dfd-j5lr5"
Dec 10 13:14:44 crc kubenswrapper[4921]: I1210 13:14:44.574617 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-54f74849c6-f5nfw"]
Dec 10 13:14:44 crc kubenswrapper[4921]: I1210 13:14:44.574872 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-54f74849c6-f5nfw" podUID="a179cfa2-4a28-42ee-a55e-e95e808bb297" containerName="barbican-api-log" containerID="cri-o://25634e5a658ba2dfd8a1c157b9c31f286244d3ca23baa47498acd785cc920868" gracePeriod=30
Dec 10 13:14:44 crc kubenswrapper[4921]: I1210 13:14:44.575327 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-54f74849c6-f5nfw" podUID="a179cfa2-4a28-42ee-a55e-e95e808bb297" containerName="barbican-api" containerID="cri-o://036636c1d617230b28e40e030a7e41e89c50b61e4384d75babb155c7310ebbeb" gracePeriod=30
Dec 10 13:14:44 crc kubenswrapper[4921]: I1210 13:14:44.770939 4921 generic.go:334] "Generic (PLEG): container finished" podID="a179cfa2-4a28-42ee-a55e-e95e808bb297" containerID="25634e5a658ba2dfd8a1c157b9c31f286244d3ca23baa47498acd785cc920868" exitCode=143
Dec 10 13:14:44 crc kubenswrapper[4921]: I1210 13:14:44.771991 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-54f74849c6-f5nfw" event={"ID":"a179cfa2-4a28-42ee-a55e-e95e808bb297","Type":"ContainerDied","Data":"25634e5a658ba2dfd8a1c157b9c31f286244d3ca23baa47498acd785cc920868"}
Dec 10 13:14:45 crc kubenswrapper[4921]: I1210 13:14:45.782827 4921 generic.go:334] "Generic (PLEG): container finished" podID="049815fe-e8f8-45c6-9360-d2d331fa8cd3" containerID="2fca88647a2c15e9928330cc72846624e630fe22cad55c0a5e782e7586e241b8" exitCode=0
Dec 10 13:14:45 crc kubenswrapper[4921]: I1210 13:14:45.782870 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-nqvc7" event={"ID":"049815fe-e8f8-45c6-9360-d2d331fa8cd3","Type":"ContainerDied","Data":"2fca88647a2c15e9928330cc72846624e630fe22cad55c0a5e782e7586e241b8"}
Dec 10 13:14:46 crc kubenswrapper[4921]: I1210 13:14:46.710718 4921 patch_prober.go:28] interesting pod/machine-config-daemon-vn2n6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 10 13:14:46 crc kubenswrapper[4921]: I1210 13:14:46.711320 4921 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" podUID="354355f7-6630-49a8-bdc5-5e875feecb7f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 10 13:14:47 crc kubenswrapper[4921]: I1210 13:14:47.164334 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-nqvc7"
Dec 10 13:14:47 crc kubenswrapper[4921]: I1210 13:14:47.341523 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/049815fe-e8f8-45c6-9360-d2d331fa8cd3-etc-machine-id\") pod \"049815fe-e8f8-45c6-9360-d2d331fa8cd3\" (UID: \"049815fe-e8f8-45c6-9360-d2d331fa8cd3\") "
Dec 10 13:14:47 crc kubenswrapper[4921]: I1210 13:14:47.341568 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-swfrk\" (UniqueName: \"kubernetes.io/projected/049815fe-e8f8-45c6-9360-d2d331fa8cd3-kube-api-access-swfrk\") pod \"049815fe-e8f8-45c6-9360-d2d331fa8cd3\" (UID: \"049815fe-e8f8-45c6-9360-d2d331fa8cd3\") "
Dec 10 13:14:47 crc kubenswrapper[4921]: I1210 13:14:47.341615 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/049815fe-e8f8-45c6-9360-d2d331fa8cd3-scripts\") pod \"049815fe-e8f8-45c6-9360-d2d331fa8cd3\" (UID: \"049815fe-e8f8-45c6-9360-d2d331fa8cd3\") "
Dec 10 13:14:47 crc kubenswrapper[4921]: I1210 13:14:47.341639 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/049815fe-e8f8-45c6-9360-d2d331fa8cd3-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "049815fe-e8f8-45c6-9360-d2d331fa8cd3" (UID: "049815fe-e8f8-45c6-9360-d2d331fa8cd3"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 10 13:14:47 crc kubenswrapper[4921]: I1210 13:14:47.341673 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/049815fe-e8f8-45c6-9360-d2d331fa8cd3-config-data\") pod \"049815fe-e8f8-45c6-9360-d2d331fa8cd3\" (UID: \"049815fe-e8f8-45c6-9360-d2d331fa8cd3\") "
Dec 10 13:14:47 crc kubenswrapper[4921]: I1210 13:14:47.341692 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/049815fe-e8f8-45c6-9360-d2d331fa8cd3-db-sync-config-data\") pod \"049815fe-e8f8-45c6-9360-d2d331fa8cd3\" (UID: \"049815fe-e8f8-45c6-9360-d2d331fa8cd3\") "
Dec 10 13:14:47 crc kubenswrapper[4921]: I1210 13:14:47.341769 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/049815fe-e8f8-45c6-9360-d2d331fa8cd3-combined-ca-bundle\") pod \"049815fe-e8f8-45c6-9360-d2d331fa8cd3\" (UID: \"049815fe-e8f8-45c6-9360-d2d331fa8cd3\") "
Dec 10 13:14:47 crc kubenswrapper[4921]: I1210 13:14:47.342100 4921 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/049815fe-e8f8-45c6-9360-d2d331fa8cd3-etc-machine-id\") on node \"crc\" DevicePath \"\""
Dec 10 13:14:47 crc kubenswrapper[4921]: I1210 13:14:47.347891 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/049815fe-e8f8-45c6-9360-d2d331fa8cd3-scripts" (OuterVolumeSpecName: "scripts") pod "049815fe-e8f8-45c6-9360-d2d331fa8cd3" (UID: "049815fe-e8f8-45c6-9360-d2d331fa8cd3"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 10 13:14:47 crc kubenswrapper[4921]: I1210 13:14:47.347919 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/049815fe-e8f8-45c6-9360-d2d331fa8cd3-kube-api-access-swfrk" (OuterVolumeSpecName: "kube-api-access-swfrk") pod "049815fe-e8f8-45c6-9360-d2d331fa8cd3" (UID: "049815fe-e8f8-45c6-9360-d2d331fa8cd3"). InnerVolumeSpecName "kube-api-access-swfrk". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 10 13:14:47 crc kubenswrapper[4921]: I1210 13:14:47.361400 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/049815fe-e8f8-45c6-9360-d2d331fa8cd3-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "049815fe-e8f8-45c6-9360-d2d331fa8cd3" (UID: "049815fe-e8f8-45c6-9360-d2d331fa8cd3"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 10 13:14:47 crc kubenswrapper[4921]: I1210 13:14:47.372167 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/049815fe-e8f8-45c6-9360-d2d331fa8cd3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "049815fe-e8f8-45c6-9360-d2d331fa8cd3" (UID: "049815fe-e8f8-45c6-9360-d2d331fa8cd3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 10 13:14:47 crc kubenswrapper[4921]: I1210 13:14:47.394278 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/049815fe-e8f8-45c6-9360-d2d331fa8cd3-config-data" (OuterVolumeSpecName: "config-data") pod "049815fe-e8f8-45c6-9360-d2d331fa8cd3" (UID: "049815fe-e8f8-45c6-9360-d2d331fa8cd3"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 10 13:14:47 crc kubenswrapper[4921]: I1210 13:14:47.444232 4921 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/049815fe-e8f8-45c6-9360-d2d331fa8cd3-scripts\") on node \"crc\" DevicePath \"\""
Dec 10 13:14:47 crc kubenswrapper[4921]: I1210 13:14:47.444271 4921 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/049815fe-e8f8-45c6-9360-d2d331fa8cd3-config-data\") on node \"crc\" DevicePath \"\""
Dec 10 13:14:47 crc kubenswrapper[4921]: I1210 13:14:47.444771 4921 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/049815fe-e8f8-45c6-9360-d2d331fa8cd3-db-sync-config-data\") on node \"crc\" DevicePath \"\""
Dec 10 13:14:47 crc kubenswrapper[4921]: I1210 13:14:47.444817 4921 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/049815fe-e8f8-45c6-9360-d2d331fa8cd3-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 10 13:14:47 crc kubenswrapper[4921]: I1210 13:14:47.444828 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-swfrk\" (UniqueName: \"kubernetes.io/projected/049815fe-e8f8-45c6-9360-d2d331fa8cd3-kube-api-access-swfrk\") on node \"crc\" DevicePath \"\""
Dec 10 13:14:47 crc kubenswrapper[4921]: I1210 13:14:47.799585 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-nqvc7" event={"ID":"049815fe-e8f8-45c6-9360-d2d331fa8cd3","Type":"ContainerDied","Data":"97063519d119ecd906fd15380443b5ffb61a8f8c6a4660fea95ca53dd37eb2ea"}
Dec 10 13:14:47 crc kubenswrapper[4921]: I1210 13:14:47.799633 4921 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="97063519d119ecd906fd15380443b5ffb61a8f8c6a4660fea95ca53dd37eb2ea"
Dec 10 13:14:47 crc kubenswrapper[4921]: I1210 13:14:47.799668 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-nqvc7"
Dec 10 13:14:48 crc kubenswrapper[4921]: I1210 13:14:48.069307 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"]
Dec 10 13:14:48 crc kubenswrapper[4921]: E1210 13:14:48.069702 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="049815fe-e8f8-45c6-9360-d2d331fa8cd3" containerName="cinder-db-sync"
Dec 10 13:14:48 crc kubenswrapper[4921]: I1210 13:14:48.069719 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="049815fe-e8f8-45c6-9360-d2d331fa8cd3" containerName="cinder-db-sync"
Dec 10 13:14:48 crc kubenswrapper[4921]: I1210 13:14:48.069877 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="049815fe-e8f8-45c6-9360-d2d331fa8cd3" containerName="cinder-db-sync"
Dec 10 13:14:48 crc kubenswrapper[4921]: I1210 13:14:48.070710 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0"
Dec 10 13:14:48 crc kubenswrapper[4921]: I1210 13:14:48.074656 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data"
Dec 10 13:14:48 crc kubenswrapper[4921]: I1210 13:14:48.074878 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-bqds8"
Dec 10 13:14:48 crc kubenswrapper[4921]: I1210 13:14:48.077497 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts"
Dec 10 13:14:48 crc kubenswrapper[4921]: I1210 13:14:48.083004 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data"
Dec 10 13:14:48 crc kubenswrapper[4921]: I1210 13:14:48.109260 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"]
Dec 10 13:14:48 crc kubenswrapper[4921]: I1210 13:14:48.154987 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d85ebdfb-8a6b-4630-af89-98a7fc5a6987-scripts\") pod \"cinder-scheduler-0\" (UID: \"d85ebdfb-8a6b-4630-af89-98a7fc5a6987\") " pod="openstack/cinder-scheduler-0"
Dec 10 13:14:48 crc kubenswrapper[4921]: I1210 13:14:48.155140 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/d85ebdfb-8a6b-4630-af89-98a7fc5a6987-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"d85ebdfb-8a6b-4630-af89-98a7fc5a6987\") " pod="openstack/cinder-scheduler-0"
Dec 10 13:14:48 crc kubenswrapper[4921]: I1210 13:14:48.253721 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-58db5546cc-xxl5s"]
Dec 10 13:14:48 crc kubenswrapper[4921]: I1210 13:14:48.255510 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-58db5546cc-xxl5s"
Dec 10 13:14:48 crc kubenswrapper[4921]: I1210 13:14:48.273234 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m4fwp\" (UniqueName: \"kubernetes.io/projected/d85ebdfb-8a6b-4630-af89-98a7fc5a6987-kube-api-access-m4fwp\") pod \"cinder-scheduler-0\" (UID: \"d85ebdfb-8a6b-4630-af89-98a7fc5a6987\") " pod="openstack/cinder-scheduler-0"
Dec 10 13:14:48 crc kubenswrapper[4921]: I1210 13:14:48.273270 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d85ebdfb-8a6b-4630-af89-98a7fc5a6987-config-data\") pod \"cinder-scheduler-0\" (UID: \"d85ebdfb-8a6b-4630-af89-98a7fc5a6987\") " pod="openstack/cinder-scheduler-0"
Dec 10 13:14:48 crc kubenswrapper[4921]: I1210 13:14:48.273302 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w59r5\" (UniqueName: \"kubernetes.io/projected/e5bcffc0-2552-4a2d-8fd0-ccfe997bf989-kube-api-access-w59r5\") pod \"dnsmasq-dns-58db5546cc-xxl5s\" (UID: \"e5bcffc0-2552-4a2d-8fd0-ccfe997bf989\") " pod="openstack/dnsmasq-dns-58db5546cc-xxl5s"
Dec 10 13:14:48 crc kubenswrapper[4921]: I1210 13:14:48.273357 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d85ebdfb-8a6b-4630-af89-98a7fc5a6987-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"d85ebdfb-8a6b-4630-af89-98a7fc5a6987\") " pod="openstack/cinder-scheduler-0"
Dec 10 13:14:48 crc kubenswrapper[4921]: I1210 13:14:48.273418 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e5bcffc0-2552-4a2d-8fd0-ccfe997bf989-dns-svc\") pod \"dnsmasq-dns-58db5546cc-xxl5s\" (UID: \"e5bcffc0-2552-4a2d-8fd0-ccfe997bf989\") " pod="openstack/dnsmasq-dns-58db5546cc-xxl5s"
Dec 10 13:14:48 crc kubenswrapper[4921]: I1210 13:14:48.273449 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d85ebdfb-8a6b-4630-af89-98a7fc5a6987-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"d85ebdfb-8a6b-4630-af89-98a7fc5a6987\") " pod="openstack/cinder-scheduler-0"
Dec 10 13:14:48 crc kubenswrapper[4921]: I1210 13:14:48.273474 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e5bcffc0-2552-4a2d-8fd0-ccfe997bf989-ovsdbserver-nb\") pod \"dnsmasq-dns-58db5546cc-xxl5s\" (UID: \"e5bcffc0-2552-4a2d-8fd0-ccfe997bf989\") " pod="openstack/dnsmasq-dns-58db5546cc-xxl5s"
Dec 10 13:14:48 crc kubenswrapper[4921]: I1210 13:14:48.273534 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d85ebdfb-8a6b-4630-af89-98a7fc5a6987-scripts\") pod \"cinder-scheduler-0\" (UID: \"d85ebdfb-8a6b-4630-af89-98a7fc5a6987\") " pod="openstack/cinder-scheduler-0"
Dec 10 13:14:48 crc kubenswrapper[4921]: I1210 13:14:48.273563 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e5bcffc0-2552-4a2d-8fd0-ccfe997bf989-config\") pod \"dnsmasq-dns-58db5546cc-xxl5s\" (UID: \"e5bcffc0-2552-4a2d-8fd0-ccfe997bf989\") " pod="openstack/dnsmasq-dns-58db5546cc-xxl5s"
Dec 10 13:14:48 crc kubenswrapper[4921]: I1210 13:14:48.273586 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e5bcffc0-2552-4a2d-8fd0-ccfe997bf989-ovsdbserver-sb\") pod \"dnsmasq-dns-58db5546cc-xxl5s\" (UID: \"e5bcffc0-2552-4a2d-8fd0-ccfe997bf989\") " pod="openstack/dnsmasq-dns-58db5546cc-xxl5s"
Dec 10 13:14:48 crc kubenswrapper[4921]: I1210 13:14:48.273634 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/d85ebdfb-8a6b-4630-af89-98a7fc5a6987-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"d85ebdfb-8a6b-4630-af89-98a7fc5a6987\") " pod="openstack/cinder-scheduler-0"
Dec 10 13:14:48 crc kubenswrapper[4921]: I1210 13:14:48.273706 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/d85ebdfb-8a6b-4630-af89-98a7fc5a6987-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"d85ebdfb-8a6b-4630-af89-98a7fc5a6987\") " pod="openstack/cinder-scheduler-0"
Dec 10 13:14:48 crc kubenswrapper[4921]: I1210 13:14:48.278021 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d85ebdfb-8a6b-4630-af89-98a7fc5a6987-scripts\") pod \"cinder-scheduler-0\" (UID: \"d85ebdfb-8a6b-4630-af89-98a7fc5a6987\") " pod="openstack/cinder-scheduler-0"
Dec 10 13:14:48 crc kubenswrapper[4921]: I1210 13:14:48.293368 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-58db5546cc-xxl5s"]
Dec 10 13:14:48 crc kubenswrapper[4921]: I1210 13:14:48.376271 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d85ebdfb-8a6b-4630-af89-98a7fc5a6987-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"d85ebdfb-8a6b-4630-af89-98a7fc5a6987\") " pod="openstack/cinder-scheduler-0"
Dec 10 13:14:48 crc kubenswrapper[4921]: I1210 13:14:48.376329 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e5bcffc0-2552-4a2d-8fd0-ccfe997bf989-ovsdbserver-nb\") pod \"dnsmasq-dns-58db5546cc-xxl5s\" (UID: \"e5bcffc0-2552-4a2d-8fd0-ccfe997bf989\") " pod="openstack/dnsmasq-dns-58db5546cc-xxl5s"
Dec 10 13:14:48 crc kubenswrapper[4921]: I1210 13:14:48.376363 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e5bcffc0-2552-4a2d-8fd0-ccfe997bf989-config\") pod \"dnsmasq-dns-58db5546cc-xxl5s\" (UID: \"e5bcffc0-2552-4a2d-8fd0-ccfe997bf989\") " pod="openstack/dnsmasq-dns-58db5546cc-xxl5s"
Dec 10 13:14:48 crc kubenswrapper[4921]: I1210 13:14:48.376379 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e5bcffc0-2552-4a2d-8fd0-ccfe997bf989-ovsdbserver-sb\") pod \"dnsmasq-dns-58db5546cc-xxl5s\" (UID: \"e5bcffc0-2552-4a2d-8fd0-ccfe997bf989\") " pod="openstack/dnsmasq-dns-58db5546cc-xxl5s"
Dec 10 13:14:48 crc kubenswrapper[4921]: I1210 13:14:48.376434 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m4fwp\" (UniqueName: \"kubernetes.io/projected/d85ebdfb-8a6b-4630-af89-98a7fc5a6987-kube-api-access-m4fwp\") pod \"cinder-scheduler-0\" (UID: \"d85ebdfb-8a6b-4630-af89-98a7fc5a6987\") " pod="openstack/cinder-scheduler-0"
Dec 10 13:14:48 crc kubenswrapper[4921]: I1210 13:14:48.376450 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d85ebdfb-8a6b-4630-af89-98a7fc5a6987-config-data\") pod \"cinder-scheduler-0\" (UID: \"d85ebdfb-8a6b-4630-af89-98a7fc5a6987\") " pod="openstack/cinder-scheduler-0"
Dec 10 13:14:48 crc kubenswrapper[4921]: I1210 13:14:48.376483 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w59r5\" (UniqueName: \"kubernetes.io/projected/e5bcffc0-2552-4a2d-8fd0-ccfe997bf989-kube-api-access-w59r5\") pod \"dnsmasq-dns-58db5546cc-xxl5s\" (UID: \"e5bcffc0-2552-4a2d-8fd0-ccfe997bf989\") " pod="openstack/dnsmasq-dns-58db5546cc-xxl5s"
Dec 10 13:14:48 crc kubenswrapper[4921]: I1210 13:14:48.376529 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d85ebdfb-8a6b-4630-af89-98a7fc5a6987-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"d85ebdfb-8a6b-4630-af89-98a7fc5a6987\") " pod="openstack/cinder-scheduler-0"
Dec 10 13:14:48 crc kubenswrapper[4921]: I1210 13:14:48.376569 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e5bcffc0-2552-4a2d-8fd0-ccfe997bf989-dns-svc\") pod \"dnsmasq-dns-58db5546cc-xxl5s\" (UID: \"e5bcffc0-2552-4a2d-8fd0-ccfe997bf989\") " pod="openstack/dnsmasq-dns-58db5546cc-xxl5s"
Dec 10 13:14:48 crc kubenswrapper[4921]: I1210 13:14:48.377436 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e5bcffc0-2552-4a2d-8fd0-ccfe997bf989-dns-svc\") pod \"dnsmasq-dns-58db5546cc-xxl5s\" (UID: \"e5bcffc0-2552-4a2d-8fd0-ccfe997bf989\") " pod="openstack/dnsmasq-dns-58db5546cc-xxl5s"
Dec 10 13:14:48 crc kubenswrapper[4921]: I1210 13:14:48.377431 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e5bcffc0-2552-4a2d-8fd0-ccfe997bf989-config\") pod \"dnsmasq-dns-58db5546cc-xxl5s\" (UID: \"e5bcffc0-2552-4a2d-8fd0-ccfe997bf989\") " pod="openstack/dnsmasq-dns-58db5546cc-xxl5s"
Dec 10 13:14:48 crc kubenswrapper[4921]: I1210 13:14:48.377918 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e5bcffc0-2552-4a2d-8fd0-ccfe997bf989-ovsdbserver-nb\") pod \"dnsmasq-dns-58db5546cc-xxl5s\" (UID: \"e5bcffc0-2552-4a2d-8fd0-ccfe997bf989\") " pod="openstack/dnsmasq-dns-58db5546cc-xxl5s"
Dec 10 13:14:48 crc kubenswrapper[4921]: I1210 13:14:48.378478 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e5bcffc0-2552-4a2d-8fd0-ccfe997bf989-ovsdbserver-sb\") pod \"dnsmasq-dns-58db5546cc-xxl5s\" (UID: \"e5bcffc0-2552-4a2d-8fd0-ccfe997bf989\") " pod="openstack/dnsmasq-dns-58db5546cc-xxl5s"
Dec 10 13:14:48 crc kubenswrapper[4921]: I1210 13:14:48.382640 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d85ebdfb-8a6b-4630-af89-98a7fc5a6987-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"d85ebdfb-8a6b-4630-af89-98a7fc5a6987\") " pod="openstack/cinder-scheduler-0"
Dec 10 13:14:48 crc kubenswrapper[4921]: I1210 13:14:48.383597 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d85ebdfb-8a6b-4630-af89-98a7fc5a6987-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"d85ebdfb-8a6b-4630-af89-98a7fc5a6987\") " pod="openstack/cinder-scheduler-0"
Dec 10 13:14:48 crc kubenswrapper[4921]: I1210 13:14:48.404180 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d85ebdfb-8a6b-4630-af89-98a7fc5a6987-config-data\") pod \"cinder-scheduler-0\" (UID: \"d85ebdfb-8a6b-4630-af89-98a7fc5a6987\") " pod="openstack/cinder-scheduler-0"
Dec 10 13:14:48 crc kubenswrapper[4921]: I1210 13:14:48.434013 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m4fwp\" (UniqueName: \"kubernetes.io/projected/d85ebdfb-8a6b-4630-af89-98a7fc5a6987-kube-api-access-m4fwp\") pod \"cinder-scheduler-0\" (UID: \"d85ebdfb-8a6b-4630-af89-98a7fc5a6987\") " pod="openstack/cinder-scheduler-0"
Dec 10 13:14:48 crc kubenswrapper[4921]: I1210 13:14:48.442101 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w59r5\" (UniqueName: \"kubernetes.io/projected/e5bcffc0-2552-4a2d-8fd0-ccfe997bf989-kube-api-access-w59r5\") pod \"dnsmasq-dns-58db5546cc-xxl5s\" (UID: \"e5bcffc0-2552-4a2d-8fd0-ccfe997bf989\") " pod="openstack/dnsmasq-dns-58db5546cc-xxl5s"
Dec 10 13:14:48 crc kubenswrapper[4921]: I1210 13:14:48.634804 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-58db5546cc-xxl5s"
Dec 10 13:14:48 crc kubenswrapper[4921]: I1210 13:14:48.685681 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0"
Dec 10 13:14:48 crc kubenswrapper[4921]: I1210 13:14:48.699244 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"]
Dec 10 13:14:48 crc kubenswrapper[4921]: I1210 13:14:48.700588 4921 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openstack/cinder-api-0" Dec 10 13:14:48 crc kubenswrapper[4921]: I1210 13:14:48.712644 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Dec 10 13:14:48 crc kubenswrapper[4921]: I1210 13:14:48.738933 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 10 13:14:48 crc kubenswrapper[4921]: I1210 13:14:48.773643 4921 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-54f74849c6-f5nfw" podUID="a179cfa2-4a28-42ee-a55e-e95e808bb297" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.145:9311/healthcheck\": read tcp 10.217.0.2:34084->10.217.0.145:9311: read: connection reset by peer" Dec 10 13:14:48 crc kubenswrapper[4921]: I1210 13:14:48.773642 4921 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-54f74849c6-f5nfw" podUID="a179cfa2-4a28-42ee-a55e-e95e808bb297" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.145:9311/healthcheck\": read tcp 10.217.0.2:34092->10.217.0.145:9311: read: connection reset by peer" Dec 10 13:14:48 crc kubenswrapper[4921]: I1210 13:14:48.852819 4921 generic.go:334] "Generic (PLEG): container finished" podID="a179cfa2-4a28-42ee-a55e-e95e808bb297" containerID="036636c1d617230b28e40e030a7e41e89c50b61e4384d75babb155c7310ebbeb" exitCode=0 Dec 10 13:14:48 crc kubenswrapper[4921]: I1210 13:14:48.852869 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-54f74849c6-f5nfw" event={"ID":"a179cfa2-4a28-42ee-a55e-e95e808bb297","Type":"ContainerDied","Data":"036636c1d617230b28e40e030a7e41e89c50b61e4384d75babb155c7310ebbeb"} Dec 10 13:14:48 crc kubenswrapper[4921]: I1210 13:14:48.886548 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/643eb1be-46d3-4334-9e70-07dd60b02014-logs\") pod \"cinder-api-0\" (UID: \"643eb1be-46d3-4334-9e70-07dd60b02014\") " pod="openstack/cinder-api-0" Dec 10 13:14:48 crc kubenswrapper[4921]: I1210 13:14:48.886602 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/643eb1be-46d3-4334-9e70-07dd60b02014-config-data-custom\") pod \"cinder-api-0\" (UID: \"643eb1be-46d3-4334-9e70-07dd60b02014\") " pod="openstack/cinder-api-0" Dec 10 13:14:48 crc kubenswrapper[4921]: I1210 13:14:48.886622 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/643eb1be-46d3-4334-9e70-07dd60b02014-config-data\") pod \"cinder-api-0\" (UID: \"643eb1be-46d3-4334-9e70-07dd60b02014\") " pod="openstack/cinder-api-0" Dec 10 13:14:48 crc kubenswrapper[4921]: I1210 13:14:48.886685 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/643eb1be-46d3-4334-9e70-07dd60b02014-scripts\") pod \"cinder-api-0\" (UID: \"643eb1be-46d3-4334-9e70-07dd60b02014\") " pod="openstack/cinder-api-0" Dec 10 13:14:48 crc kubenswrapper[4921]: I1210 13:14:48.886707 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/643eb1be-46d3-4334-9e70-07dd60b02014-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"643eb1be-46d3-4334-9e70-07dd60b02014\") " 
pod="openstack/cinder-api-0" Dec 10 13:14:48 crc kubenswrapper[4921]: I1210 13:14:48.886729 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2jm9q\" (UniqueName: \"kubernetes.io/projected/643eb1be-46d3-4334-9e70-07dd60b02014-kube-api-access-2jm9q\") pod \"cinder-api-0\" (UID: \"643eb1be-46d3-4334-9e70-07dd60b02014\") " pod="openstack/cinder-api-0" Dec 10 13:14:48 crc kubenswrapper[4921]: I1210 13:14:48.886751 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/643eb1be-46d3-4334-9e70-07dd60b02014-etc-machine-id\") pod \"cinder-api-0\" (UID: \"643eb1be-46d3-4334-9e70-07dd60b02014\") " pod="openstack/cinder-api-0" Dec 10 13:14:48 crc kubenswrapper[4921]: I1210 13:14:48.988894 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/643eb1be-46d3-4334-9e70-07dd60b02014-scripts\") pod \"cinder-api-0\" (UID: \"643eb1be-46d3-4334-9e70-07dd60b02014\") " pod="openstack/cinder-api-0" Dec 10 13:14:48 crc kubenswrapper[4921]: I1210 13:14:48.988956 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/643eb1be-46d3-4334-9e70-07dd60b02014-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"643eb1be-46d3-4334-9e70-07dd60b02014\") " pod="openstack/cinder-api-0" Dec 10 13:14:48 crc kubenswrapper[4921]: I1210 13:14:48.988992 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2jm9q\" (UniqueName: \"kubernetes.io/projected/643eb1be-46d3-4334-9e70-07dd60b02014-kube-api-access-2jm9q\") pod \"cinder-api-0\" (UID: \"643eb1be-46d3-4334-9e70-07dd60b02014\") " pod="openstack/cinder-api-0" Dec 10 13:14:48 crc kubenswrapper[4921]: I1210 13:14:48.989022 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/643eb1be-46d3-4334-9e70-07dd60b02014-etc-machine-id\") pod \"cinder-api-0\" (UID: \"643eb1be-46d3-4334-9e70-07dd60b02014\") " pod="openstack/cinder-api-0" Dec 10 13:14:48 crc kubenswrapper[4921]: I1210 13:14:48.989100 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/643eb1be-46d3-4334-9e70-07dd60b02014-logs\") pod \"cinder-api-0\" (UID: \"643eb1be-46d3-4334-9e70-07dd60b02014\") " pod="openstack/cinder-api-0" Dec 10 13:14:48 crc kubenswrapper[4921]: I1210 13:14:48.989130 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/643eb1be-46d3-4334-9e70-07dd60b02014-config-data-custom\") pod \"cinder-api-0\" (UID: \"643eb1be-46d3-4334-9e70-07dd60b02014\") " pod="openstack/cinder-api-0" Dec 10 13:14:48 crc kubenswrapper[4921]: I1210 13:14:48.989153 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/643eb1be-46d3-4334-9e70-07dd60b02014-config-data\") pod \"cinder-api-0\" (UID: \"643eb1be-46d3-4334-9e70-07dd60b02014\") " pod="openstack/cinder-api-0" Dec 10 13:14:48 crc kubenswrapper[4921]: I1210 13:14:48.989823 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/643eb1be-46d3-4334-9e70-07dd60b02014-etc-machine-id\") pod \"cinder-api-0\" (UID: 
\"643eb1be-46d3-4334-9e70-07dd60b02014\") " pod="openstack/cinder-api-0" Dec 10 13:14:48 crc kubenswrapper[4921]: I1210 13:14:48.989962 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/643eb1be-46d3-4334-9e70-07dd60b02014-logs\") pod \"cinder-api-0\" (UID: \"643eb1be-46d3-4334-9e70-07dd60b02014\") " pod="openstack/cinder-api-0" Dec 10 13:14:48 crc kubenswrapper[4921]: I1210 13:14:48.996374 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/643eb1be-46d3-4334-9e70-07dd60b02014-config-data-custom\") pod \"cinder-api-0\" (UID: \"643eb1be-46d3-4334-9e70-07dd60b02014\") " pod="openstack/cinder-api-0" Dec 10 13:14:48 crc kubenswrapper[4921]: I1210 13:14:48.996996 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/643eb1be-46d3-4334-9e70-07dd60b02014-scripts\") pod \"cinder-api-0\" (UID: \"643eb1be-46d3-4334-9e70-07dd60b02014\") " pod="openstack/cinder-api-0" Dec 10 13:14:49 crc kubenswrapper[4921]: I1210 13:14:49.000005 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/643eb1be-46d3-4334-9e70-07dd60b02014-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"643eb1be-46d3-4334-9e70-07dd60b02014\") " pod="openstack/cinder-api-0" Dec 10 13:14:49 crc kubenswrapper[4921]: I1210 13:14:49.006778 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/643eb1be-46d3-4334-9e70-07dd60b02014-config-data\") pod \"cinder-api-0\" (UID: \"643eb1be-46d3-4334-9e70-07dd60b02014\") " pod="openstack/cinder-api-0" Dec 10 13:14:49 crc kubenswrapper[4921]: I1210 13:14:49.008108 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2jm9q\" (UniqueName: \"kubernetes.io/projected/643eb1be-46d3-4334-9e70-07dd60b02014-kube-api-access-2jm9q\") pod \"cinder-api-0\" (UID: \"643eb1be-46d3-4334-9e70-07dd60b02014\") " pod="openstack/cinder-api-0" Dec 10 13:14:49 crc kubenswrapper[4921]: I1210 13:14:49.050029 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 10 13:14:49 crc kubenswrapper[4921]: I1210 13:14:49.261280 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 10 13:14:49 crc kubenswrapper[4921]: I1210 13:14:49.398595 4921 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-54f74849c6-f5nfw" Dec 10 13:14:49 crc kubenswrapper[4921]: I1210 13:14:49.502542 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a179cfa2-4a28-42ee-a55e-e95e808bb297-config-data-custom\") pod \"a179cfa2-4a28-42ee-a55e-e95e808bb297\" (UID: \"a179cfa2-4a28-42ee-a55e-e95e808bb297\") " Dec 10 13:14:49 crc kubenswrapper[4921]: I1210 13:14:49.504971 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a179cfa2-4a28-42ee-a55e-e95e808bb297-config-data\") pod \"a179cfa2-4a28-42ee-a55e-e95e808bb297\" (UID: \"a179cfa2-4a28-42ee-a55e-e95e808bb297\") " Dec 10 13:14:49 crc kubenswrapper[4921]: I1210 13:14:49.505026 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-drg7l\" (UniqueName: \"kubernetes.io/projected/a179cfa2-4a28-42ee-a55e-e95e808bb297-kube-api-access-drg7l\") pod \"a179cfa2-4a28-42ee-a55e-e95e808bb297\" (UID: \"a179cfa2-4a28-42ee-a55e-e95e808bb297\") " Dec 10 13:14:49 crc kubenswrapper[4921]: I1210 13:14:49.505057 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a179cfa2-4a28-42ee-a55e-e95e808bb297-combined-ca-bundle\") pod \"a179cfa2-4a28-42ee-a55e-e95e808bb297\" (UID: \"a179cfa2-4a28-42ee-a55e-e95e808bb297\") " Dec 10 13:14:49 crc kubenswrapper[4921]: I1210 13:14:49.505134 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a179cfa2-4a28-42ee-a55e-e95e808bb297-logs\") pod \"a179cfa2-4a28-42ee-a55e-e95e808bb297\" (UID: \"a179cfa2-4a28-42ee-a55e-e95e808bb297\") " Dec 10 13:14:49 crc kubenswrapper[4921]: I1210 13:14:49.507695 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a179cfa2-4a28-42ee-a55e-e95e808bb297-logs" (OuterVolumeSpecName: "logs") pod "a179cfa2-4a28-42ee-a55e-e95e808bb297" (UID: "a179cfa2-4a28-42ee-a55e-e95e808bb297"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 13:14:49 crc kubenswrapper[4921]: I1210 13:14:49.515416 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-58db5546cc-xxl5s"] Dec 10 13:14:49 crc kubenswrapper[4921]: I1210 13:14:49.519827 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a179cfa2-4a28-42ee-a55e-e95e808bb297-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "a179cfa2-4a28-42ee-a55e-e95e808bb297" (UID: "a179cfa2-4a28-42ee-a55e-e95e808bb297"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:14:49 crc kubenswrapper[4921]: I1210 13:14:49.524893 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a179cfa2-4a28-42ee-a55e-e95e808bb297-kube-api-access-drg7l" (OuterVolumeSpecName: "kube-api-access-drg7l") pod "a179cfa2-4a28-42ee-a55e-e95e808bb297" (UID: "a179cfa2-4a28-42ee-a55e-e95e808bb297"). InnerVolumeSpecName "kube-api-access-drg7l". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 13:14:49 crc kubenswrapper[4921]: I1210 13:14:49.549994 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a179cfa2-4a28-42ee-a55e-e95e808bb297-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a179cfa2-4a28-42ee-a55e-e95e808bb297" (UID: "a179cfa2-4a28-42ee-a55e-e95e808bb297"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:14:49 crc kubenswrapper[4921]: I1210 13:14:49.620147 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-drg7l\" (UniqueName: \"kubernetes.io/projected/a179cfa2-4a28-42ee-a55e-e95e808bb297-kube-api-access-drg7l\") on node \"crc\" DevicePath \"\"" Dec 10 13:14:49 crc kubenswrapper[4921]: I1210 13:14:49.620208 4921 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a179cfa2-4a28-42ee-a55e-e95e808bb297-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 13:14:49 crc kubenswrapper[4921]: I1210 13:14:49.620228 4921 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a179cfa2-4a28-42ee-a55e-e95e808bb297-logs\") on node \"crc\" DevicePath \"\"" Dec 10 13:14:49 crc kubenswrapper[4921]: I1210 13:14:49.620245 4921 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a179cfa2-4a28-42ee-a55e-e95e808bb297-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 10 13:14:49 crc kubenswrapper[4921]: I1210 13:14:49.693733 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a179cfa2-4a28-42ee-a55e-e95e808bb297-config-data" (OuterVolumeSpecName: "config-data") pod "a179cfa2-4a28-42ee-a55e-e95e808bb297" (UID: "a179cfa2-4a28-42ee-a55e-e95e808bb297"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:14:49 crc kubenswrapper[4921]: I1210 13:14:49.721751 4921 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a179cfa2-4a28-42ee-a55e-e95e808bb297-config-data\") on node \"crc\" DevicePath \"\"" Dec 10 13:14:49 crc kubenswrapper[4921]: W1210 13:14:49.788950 4921 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod643eb1be_46d3_4334_9e70_07dd60b02014.slice/crio-408cfb8d00257cbe59536e30412485c8ff0526fae9bcfefc33218b501b140d59 WatchSource:0}: Error finding container 408cfb8d00257cbe59536e30412485c8ff0526fae9bcfefc33218b501b140d59: Status 404 returned error can't find the container with id 408cfb8d00257cbe59536e30412485c8ff0526fae9bcfefc33218b501b140d59 Dec 10 13:14:49 crc kubenswrapper[4921]: I1210 13:14:49.789189 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 10 13:14:49 crc kubenswrapper[4921]: I1210 13:14:49.864374 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"643eb1be-46d3-4334-9e70-07dd60b02014","Type":"ContainerStarted","Data":"408cfb8d00257cbe59536e30412485c8ff0526fae9bcfefc33218b501b140d59"} Dec 10 13:14:49 crc kubenswrapper[4921]: I1210 13:14:49.868287 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-58db5546cc-xxl5s" event={"ID":"e5bcffc0-2552-4a2d-8fd0-ccfe997bf989","Type":"ContainerStarted","Data":"d54038fdfed41690962a7eefc18a0372bf56abe9a68d39cd320affbfe07aac5e"} Dec 10 13:14:49 crc kubenswrapper[4921]: I1210 13:14:49.877508 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"d85ebdfb-8a6b-4630-af89-98a7fc5a6987","Type":"ContainerStarted","Data":"bf3c18b4eeecacef80b34c44519ebafd683379f6c29d87a721e082938c8ed40c"} Dec 10 13:14:49 crc kubenswrapper[4921]: I1210 13:14:49.882895 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-54f74849c6-f5nfw" event={"ID":"a179cfa2-4a28-42ee-a55e-e95e808bb297","Type":"ContainerDied","Data":"e6e4ff439515cfe2556f8cc83b95f1266e0e23872d40eb8b841ed34dbbd81d75"} Dec 10 13:14:49 crc kubenswrapper[4921]: I1210 13:14:49.883052 4921 scope.go:117] "RemoveContainer" containerID="036636c1d617230b28e40e030a7e41e89c50b61e4384d75babb155c7310ebbeb" Dec 10 13:14:49 crc kubenswrapper[4921]: I1210 13:14:49.883018 4921 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-54f74849c6-f5nfw" Dec 10 13:14:49 crc kubenswrapper[4921]: I1210 13:14:49.946841 4921 scope.go:117] "RemoveContainer" containerID="25634e5a658ba2dfd8a1c157b9c31f286244d3ca23baa47498acd785cc920868" Dec 10 13:14:49 crc kubenswrapper[4921]: I1210 13:14:49.957139 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-54f74849c6-f5nfw"] Dec 10 13:14:49 crc kubenswrapper[4921]: I1210 13:14:49.968752 4921 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-54f74849c6-f5nfw"] Dec 10 13:14:50 crc kubenswrapper[4921]: I1210 13:14:50.899610 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"643eb1be-46d3-4334-9e70-07dd60b02014","Type":"ContainerStarted","Data":"57b38f141060c39ef2f7eafdb423b433b5606d57ebfde44a6a30f3b03db9cb4c"} Dec 10 13:14:50 crc kubenswrapper[4921]: I1210 13:14:50.903911 4921 generic.go:334] "Generic (PLEG): container finished" podID="e5bcffc0-2552-4a2d-8fd0-ccfe997bf989" containerID="1ef8bc30c0fc19ae8b9c9b15a8bf14f1b88f6e57b02329c58b4f5f18c783dcf1" exitCode=0 Dec 10 13:14:50 crc kubenswrapper[4921]: I1210 13:14:50.903941 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-58db5546cc-xxl5s" event={"ID":"e5bcffc0-2552-4a2d-8fd0-ccfe997bf989","Type":"ContainerDied","Data":"1ef8bc30c0fc19ae8b9c9b15a8bf14f1b88f6e57b02329c58b4f5f18c783dcf1"} Dec 10 13:14:51 crc kubenswrapper[4921]: I1210 13:14:51.020731 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Dec 10 13:14:51 crc kubenswrapper[4921]: I1210 13:14:51.203464 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a179cfa2-4a28-42ee-a55e-e95e808bb297" path="/var/lib/kubelet/pods/a179cfa2-4a28-42ee-a55e-e95e808bb297/volumes" Dec 10 13:14:51 crc kubenswrapper[4921]: I1210 13:14:51.561548 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-f7c76d556-mfrfz" Dec 10 13:14:51 crc kubenswrapper[4921]: I1210 13:14:51.913444 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"643eb1be-46d3-4334-9e70-07dd60b02014","Type":"ContainerStarted","Data":"76adc7d837280682dd3f121a8ced5cf43eb3fb2d271da61e2aabe0889211f4d3"} Dec 10 13:14:51 crc kubenswrapper[4921]: I1210 13:14:51.913515 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="643eb1be-46d3-4334-9e70-07dd60b02014" containerName="cinder-api-log" containerID="cri-o://57b38f141060c39ef2f7eafdb423b433b5606d57ebfde44a6a30f3b03db9cb4c" gracePeriod=30 Dec 10 13:14:51 crc kubenswrapper[4921]: I1210 13:14:51.913615 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="643eb1be-46d3-4334-9e70-07dd60b02014" containerName="cinder-api" containerID="cri-o://76adc7d837280682dd3f121a8ced5cf43eb3fb2d271da61e2aabe0889211f4d3" gracePeriod=30 Dec 10 13:14:51 crc kubenswrapper[4921]: I1210 13:14:51.913798 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Dec 10 13:14:51 crc kubenswrapper[4921]: I1210 13:14:51.918981 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-58db5546cc-xxl5s" event={"ID":"e5bcffc0-2552-4a2d-8fd0-ccfe997bf989","Type":"ContainerStarted","Data":"e91d133c156ed0040ceda5bf5182460f29395de7ba613bc03c0b9cd9cc31a238"} Dec 10 13:14:51 crc kubenswrapper[4921]: I1210 13:14:51.919181 4921 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-58db5546cc-xxl5s" Dec 10 13:14:51 crc kubenswrapper[4921]: I1210 13:14:51.943915 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=3.94389751 podStartE2EDuration="3.94389751s" podCreationTimestamp="2025-12-10 13:14:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 13:14:51.937594031 +0000 UTC m=+1089.153815955" watchObservedRunningTime="2025-12-10 13:14:51.94389751 +0000 UTC m=+1089.160119444" Dec 10 13:14:51 crc kubenswrapper[4921]: I1210 13:14:51.972020 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-58db5546cc-xxl5s" podStartSLOduration=3.971999414 podStartE2EDuration="3.971999414s" podCreationTimestamp="2025-12-10 13:14:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 13:14:51.965852819 +0000 UTC m=+1089.182074743" watchObservedRunningTime="2025-12-10 13:14:51.971999414 +0000 UTC m=+1089.188221338" Dec 10 13:14:52 crc kubenswrapper[4921]: I1210 13:14:52.904572 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 10 13:14:52 crc kubenswrapper[4921]: I1210 13:14:52.970011 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 10 13:14:52 crc kubenswrapper[4921]: I1210 13:14:52.970122 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"643eb1be-46d3-4334-9e70-07dd60b02014","Type":"ContainerDied","Data":"76adc7d837280682dd3f121a8ced5cf43eb3fb2d271da61e2aabe0889211f4d3"} Dec 10 13:14:52 crc kubenswrapper[4921]: I1210 13:14:52.970465 4921 scope.go:117] "RemoveContainer" containerID="76adc7d837280682dd3f121a8ced5cf43eb3fb2d271da61e2aabe0889211f4d3" Dec 10 13:14:52 crc kubenswrapper[4921]: I1210 13:14:52.969928 4921 generic.go:334] "Generic (PLEG): container finished" podID="643eb1be-46d3-4334-9e70-07dd60b02014" containerID="76adc7d837280682dd3f121a8ced5cf43eb3fb2d271da61e2aabe0889211f4d3" exitCode=0 Dec 10 13:14:52 crc kubenswrapper[4921]: I1210 13:14:52.970887 4921 generic.go:334] "Generic (PLEG): container finished" podID="643eb1be-46d3-4334-9e70-07dd60b02014" containerID="57b38f141060c39ef2f7eafdb423b433b5606d57ebfde44a6a30f3b03db9cb4c" exitCode=143 Dec 10 13:14:52 crc kubenswrapper[4921]: I1210 13:14:52.970921 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"643eb1be-46d3-4334-9e70-07dd60b02014","Type":"ContainerDied","Data":"57b38f141060c39ef2f7eafdb423b433b5606d57ebfde44a6a30f3b03db9cb4c"} Dec 10 13:14:52 crc kubenswrapper[4921]: I1210 13:14:52.970969 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"643eb1be-46d3-4334-9e70-07dd60b02014","Type":"ContainerDied","Data":"408cfb8d00257cbe59536e30412485c8ff0526fae9bcfefc33218b501b140d59"} Dec 10 13:14:53 crc kubenswrapper[4921]: I1210 13:14:53.003655 4921 scope.go:117] "RemoveContainer" containerID="57b38f141060c39ef2f7eafdb423b433b5606d57ebfde44a6a30f3b03db9cb4c" Dec 10 13:14:53 crc kubenswrapper[4921]: I1210 13:14:53.034880 4921 scope.go:117] "RemoveContainer" containerID="76adc7d837280682dd3f121a8ced5cf43eb3fb2d271da61e2aabe0889211f4d3" Dec 10 13:14:53 crc 
Dec 10 13:14:53 crc kubenswrapper[4921]: E1210 13:14:53.036217 4921 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"76adc7d837280682dd3f121a8ced5cf43eb3fb2d271da61e2aabe0889211f4d3\": container with ID starting with 76adc7d837280682dd3f121a8ced5cf43eb3fb2d271da61e2aabe0889211f4d3 not found: ID does not exist" containerID="76adc7d837280682dd3f121a8ced5cf43eb3fb2d271da61e2aabe0889211f4d3"
Dec 10 13:14:53 crc kubenswrapper[4921]: I1210 13:14:53.036257 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"76adc7d837280682dd3f121a8ced5cf43eb3fb2d271da61e2aabe0889211f4d3"} err="failed to get container status \"76adc7d837280682dd3f121a8ced5cf43eb3fb2d271da61e2aabe0889211f4d3\": rpc error: code = NotFound desc = could not find container \"76adc7d837280682dd3f121a8ced5cf43eb3fb2d271da61e2aabe0889211f4d3\": container with ID starting with 76adc7d837280682dd3f121a8ced5cf43eb3fb2d271da61e2aabe0889211f4d3 not found: ID does not exist"
Dec 10 13:14:53 crc kubenswrapper[4921]: I1210 13:14:53.036508 4921 scope.go:117] "RemoveContainer" containerID="57b38f141060c39ef2f7eafdb423b433b5606d57ebfde44a6a30f3b03db9cb4c"
Dec 10 13:14:53 crc kubenswrapper[4921]: E1210 13:14:53.036981 4921 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"57b38f141060c39ef2f7eafdb423b433b5606d57ebfde44a6a30f3b03db9cb4c\": container with ID starting with 57b38f141060c39ef2f7eafdb423b433b5606d57ebfde44a6a30f3b03db9cb4c not found: ID does not exist" containerID="57b38f141060c39ef2f7eafdb423b433b5606d57ebfde44a6a30f3b03db9cb4c"
Dec 10 13:14:53 crc kubenswrapper[4921]: I1210 13:14:53.037533 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"57b38f141060c39ef2f7eafdb423b433b5606d57ebfde44a6a30f3b03db9cb4c"} err="failed to get container status \"57b38f141060c39ef2f7eafdb423b433b5606d57ebfde44a6a30f3b03db9cb4c\": rpc error: code = NotFound desc = could not find container \"57b38f141060c39ef2f7eafdb423b433b5606d57ebfde44a6a30f3b03db9cb4c\": container with ID starting with 57b38f141060c39ef2f7eafdb423b433b5606d57ebfde44a6a30f3b03db9cb4c not found: ID does not exist"
Dec 10 13:14:53 crc kubenswrapper[4921]: I1210 13:14:53.037680 4921 scope.go:117] "RemoveContainer" containerID="76adc7d837280682dd3f121a8ced5cf43eb3fb2d271da61e2aabe0889211f4d3"
Dec 10 13:14:53 crc kubenswrapper[4921]: I1210 13:14:53.038187 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"76adc7d837280682dd3f121a8ced5cf43eb3fb2d271da61e2aabe0889211f4d3"} err="failed to get container status \"76adc7d837280682dd3f121a8ced5cf43eb3fb2d271da61e2aabe0889211f4d3\": rpc error: code = NotFound desc = could not find container \"76adc7d837280682dd3f121a8ced5cf43eb3fb2d271da61e2aabe0889211f4d3\": container with ID starting with 76adc7d837280682dd3f121a8ced5cf43eb3fb2d271da61e2aabe0889211f4d3 not found: ID does not exist"
Dec 10 13:14:53 crc kubenswrapper[4921]: I1210 13:14:53.038261 4921 scope.go:117] "RemoveContainer" containerID="57b38f141060c39ef2f7eafdb423b433b5606d57ebfde44a6a30f3b03db9cb4c"
Dec 10 13:14:53 crc kubenswrapper[4921]: I1210 13:14:53.039033 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"57b38f141060c39ef2f7eafdb423b433b5606d57ebfde44a6a30f3b03db9cb4c"} err="failed to get container status \"57b38f141060c39ef2f7eafdb423b433b5606d57ebfde44a6a30f3b03db9cb4c\": rpc error: code = NotFound desc = could not find container \"57b38f141060c39ef2f7eafdb423b433b5606d57ebfde44a6a30f3b03db9cb4c\": container with ID starting with 57b38f141060c39ef2f7eafdb423b433b5606d57ebfde44a6a30f3b03db9cb4c not found: ID does not exist"
Dec 10 13:14:53 crc kubenswrapper[4921]: I1210 13:14:53.088796 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2jm9q\" (UniqueName: \"kubernetes.io/projected/643eb1be-46d3-4334-9e70-07dd60b02014-kube-api-access-2jm9q\") pod \"643eb1be-46d3-4334-9e70-07dd60b02014\" (UID: \"643eb1be-46d3-4334-9e70-07dd60b02014\") "
Dec 10 13:14:53 crc kubenswrapper[4921]: I1210 13:14:53.089009 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/643eb1be-46d3-4334-9e70-07dd60b02014-combined-ca-bundle\") pod \"643eb1be-46d3-4334-9e70-07dd60b02014\" (UID: \"643eb1be-46d3-4334-9e70-07dd60b02014\") "
Dec 10 13:14:53 crc kubenswrapper[4921]: I1210 13:14:53.089083 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/643eb1be-46d3-4334-9e70-07dd60b02014-logs\") pod \"643eb1be-46d3-4334-9e70-07dd60b02014\" (UID: \"643eb1be-46d3-4334-9e70-07dd60b02014\") "
Dec 10 13:14:53 crc kubenswrapper[4921]: I1210 13:14:53.089152 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/643eb1be-46d3-4334-9e70-07dd60b02014-config-data\") pod \"643eb1be-46d3-4334-9e70-07dd60b02014\" (UID: \"643eb1be-46d3-4334-9e70-07dd60b02014\") "
Dec 10 13:14:53 crc kubenswrapper[4921]: I1210 13:14:53.089237 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/643eb1be-46d3-4334-9e70-07dd60b02014-etc-machine-id\") pod \"643eb1be-46d3-4334-9e70-07dd60b02014\" (UID: \"643eb1be-46d3-4334-9e70-07dd60b02014\") "
Dec 10 13:14:53 crc kubenswrapper[4921]: I1210 13:14:53.089335 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/643eb1be-46d3-4334-9e70-07dd60b02014-config-data-custom\") pod \"643eb1be-46d3-4334-9e70-07dd60b02014\" (UID: \"643eb1be-46d3-4334-9e70-07dd60b02014\") "
Dec 10 13:14:53 crc kubenswrapper[4921]: I1210 13:14:53.089447 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/643eb1be-46d3-4334-9e70-07dd60b02014-scripts\") pod \"643eb1be-46d3-4334-9e70-07dd60b02014\" (UID: \"643eb1be-46d3-4334-9e70-07dd60b02014\") "
Dec 10 13:14:53 crc kubenswrapper[4921]: I1210 13:14:53.089555 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/643eb1be-46d3-4334-9e70-07dd60b02014-logs" (OuterVolumeSpecName: "logs") pod "643eb1be-46d3-4334-9e70-07dd60b02014" (UID: "643eb1be-46d3-4334-9e70-07dd60b02014"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 10 13:14:53 crc kubenswrapper[4921]: I1210 13:14:53.089621 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/643eb1be-46d3-4334-9e70-07dd60b02014-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "643eb1be-46d3-4334-9e70-07dd60b02014" (UID: "643eb1be-46d3-4334-9e70-07dd60b02014"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 10 13:14:53 crc kubenswrapper[4921]: I1210 13:14:53.090495 4921 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/643eb1be-46d3-4334-9e70-07dd60b02014-logs\") on node \"crc\" DevicePath \"\""
Dec 10 13:14:53 crc kubenswrapper[4921]: I1210 13:14:53.090524 4921 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/643eb1be-46d3-4334-9e70-07dd60b02014-etc-machine-id\") on node \"crc\" DevicePath \"\""
Dec 10 13:14:53 crc kubenswrapper[4921]: I1210 13:14:53.095006 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/643eb1be-46d3-4334-9e70-07dd60b02014-scripts" (OuterVolumeSpecName: "scripts") pod "643eb1be-46d3-4334-9e70-07dd60b02014" (UID: "643eb1be-46d3-4334-9e70-07dd60b02014"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 10 13:14:53 crc kubenswrapper[4921]: I1210 13:14:53.095063 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/643eb1be-46d3-4334-9e70-07dd60b02014-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "643eb1be-46d3-4334-9e70-07dd60b02014" (UID: "643eb1be-46d3-4334-9e70-07dd60b02014"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 10 13:14:53 crc kubenswrapper[4921]: I1210 13:14:53.100587 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/643eb1be-46d3-4334-9e70-07dd60b02014-kube-api-access-2jm9q" (OuterVolumeSpecName: "kube-api-access-2jm9q") pod "643eb1be-46d3-4334-9e70-07dd60b02014" (UID: "643eb1be-46d3-4334-9e70-07dd60b02014"). InnerVolumeSpecName "kube-api-access-2jm9q". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 10 13:14:53 crc kubenswrapper[4921]: I1210 13:14:53.130057 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/643eb1be-46d3-4334-9e70-07dd60b02014-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "643eb1be-46d3-4334-9e70-07dd60b02014" (UID: "643eb1be-46d3-4334-9e70-07dd60b02014"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 10 13:14:53 crc kubenswrapper[4921]: I1210 13:14:53.154564 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/643eb1be-46d3-4334-9e70-07dd60b02014-config-data" (OuterVolumeSpecName: "config-data") pod "643eb1be-46d3-4334-9e70-07dd60b02014" (UID: "643eb1be-46d3-4334-9e70-07dd60b02014"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 10 13:14:53 crc kubenswrapper[4921]: I1210 13:14:53.192188 4921 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/643eb1be-46d3-4334-9e70-07dd60b02014-config-data-custom\") on node \"crc\" DevicePath \"\""
Dec 10 13:14:53 crc kubenswrapper[4921]: I1210 13:14:53.192231 4921 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/643eb1be-46d3-4334-9e70-07dd60b02014-scripts\") on node \"crc\" DevicePath \"\""
Dec 10 13:14:53 crc kubenswrapper[4921]: I1210 13:14:53.192247 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2jm9q\" (UniqueName: \"kubernetes.io/projected/643eb1be-46d3-4334-9e70-07dd60b02014-kube-api-access-2jm9q\") on node \"crc\" DevicePath \"\""
Dec 10 13:14:53 crc kubenswrapper[4921]: I1210 13:14:53.192266 4921 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/643eb1be-46d3-4334-9e70-07dd60b02014-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 10 13:14:53 crc kubenswrapper[4921]: I1210 13:14:53.192283 4921 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/643eb1be-46d3-4334-9e70-07dd60b02014-config-data\") on node \"crc\" DevicePath \"\""
Dec 10 13:14:53 crc kubenswrapper[4921]: I1210 13:14:53.312790 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"]
Dec 10 13:14:53 crc kubenswrapper[4921]: I1210 13:14:53.323868 4921 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"]
Dec 10 13:14:53 crc kubenswrapper[4921]: I1210 13:14:53.338766 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"]
Dec 10 13:14:53 crc kubenswrapper[4921]: E1210 13:14:53.340074 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="643eb1be-46d3-4334-9e70-07dd60b02014" containerName="cinder-api-log"
Dec 10 13:14:53 crc kubenswrapper[4921]: I1210 13:14:53.340097 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="643eb1be-46d3-4334-9e70-07dd60b02014" containerName="cinder-api-log"
Dec 10 13:14:53 crc kubenswrapper[4921]: E1210 13:14:53.340121 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a179cfa2-4a28-42ee-a55e-e95e808bb297" containerName="barbican-api"
Dec 10 13:14:53 crc kubenswrapper[4921]: I1210 13:14:53.340130 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="a179cfa2-4a28-42ee-a55e-e95e808bb297" containerName="barbican-api"
Dec 10 13:14:53 crc kubenswrapper[4921]: E1210 13:14:53.341045 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a179cfa2-4a28-42ee-a55e-e95e808bb297" containerName="barbican-api-log"
Dec 10 13:14:53 crc kubenswrapper[4921]: I1210 13:14:53.341063 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="a179cfa2-4a28-42ee-a55e-e95e808bb297" containerName="barbican-api-log"
Dec 10 13:14:53 crc kubenswrapper[4921]: E1210 13:14:53.341075 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="643eb1be-46d3-4334-9e70-07dd60b02014" containerName="cinder-api"
Dec 10 13:14:53 crc kubenswrapper[4921]: I1210 13:14:53.341082 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="643eb1be-46d3-4334-9e70-07dd60b02014" containerName="cinder-api"
Dec 10 13:14:53 crc kubenswrapper[4921]: I1210 13:14:53.341309 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="a179cfa2-4a28-42ee-a55e-e95e808bb297" containerName="barbican-api-log"
podUID="a179cfa2-4a28-42ee-a55e-e95e808bb297" containerName="barbican-api-log" Dec 10 13:14:53 crc kubenswrapper[4921]: I1210 13:14:53.341327 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="643eb1be-46d3-4334-9e70-07dd60b02014" containerName="cinder-api" Dec 10 13:14:53 crc kubenswrapper[4921]: I1210 13:14:53.341339 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="643eb1be-46d3-4334-9e70-07dd60b02014" containerName="cinder-api-log" Dec 10 13:14:53 crc kubenswrapper[4921]: I1210 13:14:53.341348 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="a179cfa2-4a28-42ee-a55e-e95e808bb297" containerName="barbican-api" Dec 10 13:14:53 crc kubenswrapper[4921]: I1210 13:14:53.342324 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 10 13:14:53 crc kubenswrapper[4921]: I1210 13:14:53.344582 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-public-svc" Dec 10 13:14:53 crc kubenswrapper[4921]: I1210 13:14:53.344790 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Dec 10 13:14:53 crc kubenswrapper[4921]: I1210 13:14:53.351633 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-internal-svc" Dec 10 13:14:53 crc kubenswrapper[4921]: I1210 13:14:53.355451 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 10 13:14:53 crc kubenswrapper[4921]: I1210 13:14:53.498672 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/bff237c9-cb6c-4dc1-bdd0-499a5c7f33d2-etc-machine-id\") pod \"cinder-api-0\" (UID: \"bff237c9-cb6c-4dc1-bdd0-499a5c7f33d2\") " pod="openstack/cinder-api-0" Dec 10 13:14:53 crc kubenswrapper[4921]: I1210 13:14:53.499043 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bff237c9-cb6c-4dc1-bdd0-499a5c7f33d2-logs\") pod \"cinder-api-0\" (UID: \"bff237c9-cb6c-4dc1-bdd0-499a5c7f33d2\") " pod="openstack/cinder-api-0" Dec 10 13:14:53 crc kubenswrapper[4921]: I1210 13:14:53.499099 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bff237c9-cb6c-4dc1-bdd0-499a5c7f33d2-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"bff237c9-cb6c-4dc1-bdd0-499a5c7f33d2\") " pod="openstack/cinder-api-0" Dec 10 13:14:53 crc kubenswrapper[4921]: I1210 13:14:53.499125 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/bff237c9-cb6c-4dc1-bdd0-499a5c7f33d2-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"bff237c9-cb6c-4dc1-bdd0-499a5c7f33d2\") " pod="openstack/cinder-api-0" Dec 10 13:14:53 crc kubenswrapper[4921]: I1210 13:14:53.499183 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bff237c9-cb6c-4dc1-bdd0-499a5c7f33d2-scripts\") pod \"cinder-api-0\" (UID: \"bff237c9-cb6c-4dc1-bdd0-499a5c7f33d2\") " pod="openstack/cinder-api-0" Dec 10 13:14:53 crc kubenswrapper[4921]: I1210 13:14:53.499243 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/bff237c9-cb6c-4dc1-bdd0-499a5c7f33d2-public-tls-certs\") pod \"cinder-api-0\" (UID: \"bff237c9-cb6c-4dc1-bdd0-499a5c7f33d2\") " pod="openstack/cinder-api-0" Dec 10 13:14:53 crc kubenswrapper[4921]: I1210 13:14:53.499296 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bff237c9-cb6c-4dc1-bdd0-499a5c7f33d2-config-data\") pod \"cinder-api-0\" (UID: \"bff237c9-cb6c-4dc1-bdd0-499a5c7f33d2\") " pod="openstack/cinder-api-0" Dec 10 13:14:53 crc kubenswrapper[4921]: I1210 13:14:53.499330 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8cjwx\" (UniqueName: \"kubernetes.io/projected/bff237c9-cb6c-4dc1-bdd0-499a5c7f33d2-kube-api-access-8cjwx\") pod \"cinder-api-0\" (UID: \"bff237c9-cb6c-4dc1-bdd0-499a5c7f33d2\") " pod="openstack/cinder-api-0" Dec 10 13:14:53 crc kubenswrapper[4921]: I1210 13:14:53.499358 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bff237c9-cb6c-4dc1-bdd0-499a5c7f33d2-config-data-custom\") pod \"cinder-api-0\" (UID: \"bff237c9-cb6c-4dc1-bdd0-499a5c7f33d2\") " pod="openstack/cinder-api-0" Dec 10 13:14:53 crc kubenswrapper[4921]: I1210 13:14:53.600806 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bff237c9-cb6c-4dc1-bdd0-499a5c7f33d2-config-data\") pod \"cinder-api-0\" (UID: \"bff237c9-cb6c-4dc1-bdd0-499a5c7f33d2\") " pod="openstack/cinder-api-0" Dec 10 13:14:53 crc kubenswrapper[4921]: I1210 13:14:53.600857 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8cjwx\" (UniqueName: \"kubernetes.io/projected/bff237c9-cb6c-4dc1-bdd0-499a5c7f33d2-kube-api-access-8cjwx\") pod \"cinder-api-0\" (UID: \"bff237c9-cb6c-4dc1-bdd0-499a5c7f33d2\") " pod="openstack/cinder-api-0" Dec 10 13:14:53 crc kubenswrapper[4921]: I1210 13:14:53.600877 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bff237c9-cb6c-4dc1-bdd0-499a5c7f33d2-config-data-custom\") pod \"cinder-api-0\" (UID: \"bff237c9-cb6c-4dc1-bdd0-499a5c7f33d2\") " pod="openstack/cinder-api-0" Dec 10 13:14:53 crc kubenswrapper[4921]: I1210 13:14:53.600907 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/bff237c9-cb6c-4dc1-bdd0-499a5c7f33d2-etc-machine-id\") pod \"cinder-api-0\" (UID: \"bff237c9-cb6c-4dc1-bdd0-499a5c7f33d2\") " pod="openstack/cinder-api-0" Dec 10 13:14:53 crc kubenswrapper[4921]: I1210 13:14:53.600928 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bff237c9-cb6c-4dc1-bdd0-499a5c7f33d2-logs\") pod \"cinder-api-0\" (UID: \"bff237c9-cb6c-4dc1-bdd0-499a5c7f33d2\") " pod="openstack/cinder-api-0" Dec 10 13:14:53 crc kubenswrapper[4921]: I1210 13:14:53.600961 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bff237c9-cb6c-4dc1-bdd0-499a5c7f33d2-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"bff237c9-cb6c-4dc1-bdd0-499a5c7f33d2\") " pod="openstack/cinder-api-0" Dec 10 13:14:53 crc kubenswrapper[4921]: I1210 13:14:53.600983 4921 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/bff237c9-cb6c-4dc1-bdd0-499a5c7f33d2-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"bff237c9-cb6c-4dc1-bdd0-499a5c7f33d2\") " pod="openstack/cinder-api-0" Dec 10 13:14:53 crc kubenswrapper[4921]: I1210 13:14:53.601024 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bff237c9-cb6c-4dc1-bdd0-499a5c7f33d2-scripts\") pod \"cinder-api-0\" (UID: \"bff237c9-cb6c-4dc1-bdd0-499a5c7f33d2\") " pod="openstack/cinder-api-0" Dec 10 13:14:53 crc kubenswrapper[4921]: I1210 13:14:53.601049 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/bff237c9-cb6c-4dc1-bdd0-499a5c7f33d2-public-tls-certs\") pod \"cinder-api-0\" (UID: \"bff237c9-cb6c-4dc1-bdd0-499a5c7f33d2\") " pod="openstack/cinder-api-0" Dec 10 13:14:53 crc kubenswrapper[4921]: I1210 13:14:53.601745 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/bff237c9-cb6c-4dc1-bdd0-499a5c7f33d2-etc-machine-id\") pod \"cinder-api-0\" (UID: \"bff237c9-cb6c-4dc1-bdd0-499a5c7f33d2\") " pod="openstack/cinder-api-0" Dec 10 13:14:53 crc kubenswrapper[4921]: I1210 13:14:53.603902 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bff237c9-cb6c-4dc1-bdd0-499a5c7f33d2-logs\") pod \"cinder-api-0\" (UID: \"bff237c9-cb6c-4dc1-bdd0-499a5c7f33d2\") " pod="openstack/cinder-api-0" Dec 10 13:14:53 crc kubenswrapper[4921]: I1210 13:14:53.608377 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/bff237c9-cb6c-4dc1-bdd0-499a5c7f33d2-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"bff237c9-cb6c-4dc1-bdd0-499a5c7f33d2\") " pod="openstack/cinder-api-0" Dec 10 13:14:53 crc kubenswrapper[4921]: I1210 13:14:53.612252 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bff237c9-cb6c-4dc1-bdd0-499a5c7f33d2-config-data-custom\") pod \"cinder-api-0\" (UID: \"bff237c9-cb6c-4dc1-bdd0-499a5c7f33d2\") " pod="openstack/cinder-api-0" Dec 10 13:14:53 crc kubenswrapper[4921]: I1210 13:14:53.620447 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bff237c9-cb6c-4dc1-bdd0-499a5c7f33d2-config-data\") pod \"cinder-api-0\" (UID: \"bff237c9-cb6c-4dc1-bdd0-499a5c7f33d2\") " pod="openstack/cinder-api-0" Dec 10 13:14:53 crc kubenswrapper[4921]: I1210 13:14:53.620826 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/bff237c9-cb6c-4dc1-bdd0-499a5c7f33d2-public-tls-certs\") pod \"cinder-api-0\" (UID: \"bff237c9-cb6c-4dc1-bdd0-499a5c7f33d2\") " pod="openstack/cinder-api-0" Dec 10 13:14:53 crc kubenswrapper[4921]: I1210 13:14:53.621998 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bff237c9-cb6c-4dc1-bdd0-499a5c7f33d2-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"bff237c9-cb6c-4dc1-bdd0-499a5c7f33d2\") " pod="openstack/cinder-api-0" Dec 10 13:14:53 crc kubenswrapper[4921]: I1210 13:14:53.638103 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" 
(UniqueName: \"kubernetes.io/secret/bff237c9-cb6c-4dc1-bdd0-499a5c7f33d2-scripts\") pod \"cinder-api-0\" (UID: \"bff237c9-cb6c-4dc1-bdd0-499a5c7f33d2\") " pod="openstack/cinder-api-0" Dec 10 13:14:53 crc kubenswrapper[4921]: I1210 13:14:53.638443 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8cjwx\" (UniqueName: \"kubernetes.io/projected/bff237c9-cb6c-4dc1-bdd0-499a5c7f33d2-kube-api-access-8cjwx\") pod \"cinder-api-0\" (UID: \"bff237c9-cb6c-4dc1-bdd0-499a5c7f33d2\") " pod="openstack/cinder-api-0" Dec 10 13:14:53 crc kubenswrapper[4921]: I1210 13:14:53.666791 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 10 13:14:53 crc kubenswrapper[4921]: I1210 13:14:53.731801 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-565cd4b5db-xfcb6" Dec 10 13:14:53 crc kubenswrapper[4921]: I1210 13:14:53.982251 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"d85ebdfb-8a6b-4630-af89-98a7fc5a6987","Type":"ContainerStarted","Data":"773ed1cdde5df7cb5d4e30de38338384a1322c50be42d364cb919f5960eb0770"} Dec 10 13:14:53 crc kubenswrapper[4921]: I1210 13:14:53.982302 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"d85ebdfb-8a6b-4630-af89-98a7fc5a6987","Type":"ContainerStarted","Data":"51a773549fb76ff6236746f5c694fb722035182357f6cc549b1a9231699a4c92"} Dec 10 13:14:54 crc kubenswrapper[4921]: I1210 13:14:54.006158 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=2.87001764 podStartE2EDuration="6.006127305s" podCreationTimestamp="2025-12-10 13:14:48 +0000 UTC" firstStartedPulling="2025-12-10 13:14:49.288846856 +0000 UTC m=+1086.505068780" lastFinishedPulling="2025-12-10 13:14:52.424956521 +0000 UTC m=+1089.641178445" observedRunningTime="2025-12-10 13:14:54.004836511 +0000 UTC m=+1091.221058445" watchObservedRunningTime="2025-12-10 13:14:54.006127305 +0000 UTC m=+1091.222349229" Dec 10 13:14:54 crc kubenswrapper[4921]: I1210 13:14:54.039005 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-766458cb5f-2g6ln" Dec 10 13:14:54 crc kubenswrapper[4921]: I1210 13:14:54.132382 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-f7c76d556-mfrfz"] Dec 10 13:14:54 crc kubenswrapper[4921]: I1210 13:14:54.132603 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-f7c76d556-mfrfz" podUID="7c3d07a5-ac22-4395-9027-78255ba114ca" containerName="neutron-api" containerID="cri-o://27dee089db8054d40e127e19c6eb01354cf84deecb13d5b3c006979d90831c72" gracePeriod=30 Dec 10 13:14:54 crc kubenswrapper[4921]: I1210 13:14:54.132994 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-f7c76d556-mfrfz" podUID="7c3d07a5-ac22-4395-9027-78255ba114ca" containerName="neutron-httpd" containerID="cri-o://c97b1225f3bb2e6097ffaf57a4192666744d1adcbe1fea4d5c0815faff7025ff" gracePeriod=30 Dec 10 13:14:54 crc kubenswrapper[4921]: I1210 13:14:54.175130 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 10 13:14:54 crc kubenswrapper[4921]: W1210 13:14:54.195892 4921 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbff237c9_cb6c_4dc1_bdd0_499a5c7f33d2.slice/crio-744d105e4756a0418f01a99fb874b85ec7f965f3987bf11581aa4a565d856a5b WatchSource:0}: Error finding container 744d105e4756a0418f01a99fb874b85ec7f965f3987bf11581aa4a565d856a5b: Status 404 returned error can't find the container with id 744d105e4756a0418f01a99fb874b85ec7f965f3987bf11581aa4a565d856a5b Dec 10 13:14:55 crc kubenswrapper[4921]: I1210 13:14:55.023783 4921 generic.go:334] "Generic (PLEG): container finished" podID="7c3d07a5-ac22-4395-9027-78255ba114ca" containerID="c97b1225f3bb2e6097ffaf57a4192666744d1adcbe1fea4d5c0815faff7025ff" exitCode=0 Dec 10 13:14:55 crc kubenswrapper[4921]: I1210 13:14:55.023925 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-f7c76d556-mfrfz" event={"ID":"7c3d07a5-ac22-4395-9027-78255ba114ca","Type":"ContainerDied","Data":"c97b1225f3bb2e6097ffaf57a4192666744d1adcbe1fea4d5c0815faff7025ff"} Dec 10 13:14:55 crc kubenswrapper[4921]: I1210 13:14:55.045008 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"bff237c9-cb6c-4dc1-bdd0-499a5c7f33d2","Type":"ContainerStarted","Data":"24cb17ddf28458fd6e2117ad3ff362d5bfbc0f558bf8df705df6783103d3bbb8"} Dec 10 13:14:55 crc kubenswrapper[4921]: I1210 13:14:55.045041 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"bff237c9-cb6c-4dc1-bdd0-499a5c7f33d2","Type":"ContainerStarted","Data":"744d105e4756a0418f01a99fb874b85ec7f965f3987bf11581aa4a565d856a5b"} Dec 10 13:14:55 crc kubenswrapper[4921]: I1210 13:14:55.216486 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="643eb1be-46d3-4334-9e70-07dd60b02014" path="/var/lib/kubelet/pods/643eb1be-46d3-4334-9e70-07dd60b02014/volumes" Dec 10 13:14:55 crc kubenswrapper[4921]: I1210 13:14:55.596188 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Dec 10 13:14:55 crc kubenswrapper[4921]: I1210 13:14:55.597530 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Dec 10 13:14:55 crc kubenswrapper[4921]: I1210 13:14:55.600048 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret" Dec 10 13:14:55 crc kubenswrapper[4921]: I1210 13:14:55.600060 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config" Dec 10 13:14:55 crc kubenswrapper[4921]: I1210 13:14:55.600152 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-k6pt4" Dec 10 13:14:55 crc kubenswrapper[4921]: I1210 13:14:55.609747 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Dec 10 13:14:55 crc kubenswrapper[4921]: I1210 13:14:55.759740 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/5c6a92bb-c309-4db7-89c9-4f6fb2a8c069-openstack-config\") pod \"openstackclient\" (UID: \"5c6a92bb-c309-4db7-89c9-4f6fb2a8c069\") " pod="openstack/openstackclient" Dec 10 13:14:55 crc kubenswrapper[4921]: I1210 13:14:55.759833 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gpn4g\" (UniqueName: \"kubernetes.io/projected/5c6a92bb-c309-4db7-89c9-4f6fb2a8c069-kube-api-access-gpn4g\") pod \"openstackclient\" (UID: \"5c6a92bb-c309-4db7-89c9-4f6fb2a8c069\") " pod="openstack/openstackclient" Dec 10 13:14:55 crc kubenswrapper[4921]: I1210 13:14:55.759861 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c6a92bb-c309-4db7-89c9-4f6fb2a8c069-combined-ca-bundle\") pod \"openstackclient\" (UID: \"5c6a92bb-c309-4db7-89c9-4f6fb2a8c069\") " pod="openstack/openstackclient" Dec 10 13:14:55 crc kubenswrapper[4921]: I1210 13:14:55.759891 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/5c6a92bb-c309-4db7-89c9-4f6fb2a8c069-openstack-config-secret\") pod \"openstackclient\" (UID: \"5c6a92bb-c309-4db7-89c9-4f6fb2a8c069\") " pod="openstack/openstackclient" Dec 10 13:14:55 crc kubenswrapper[4921]: I1210 13:14:55.861547 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/5c6a92bb-c309-4db7-89c9-4f6fb2a8c069-openstack-config\") pod \"openstackclient\" (UID: \"5c6a92bb-c309-4db7-89c9-4f6fb2a8c069\") " pod="openstack/openstackclient" Dec 10 13:14:55 crc kubenswrapper[4921]: I1210 13:14:55.861632 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gpn4g\" (UniqueName: \"kubernetes.io/projected/5c6a92bb-c309-4db7-89c9-4f6fb2a8c069-kube-api-access-gpn4g\") pod \"openstackclient\" (UID: \"5c6a92bb-c309-4db7-89c9-4f6fb2a8c069\") " pod="openstack/openstackclient" Dec 10 13:14:55 crc kubenswrapper[4921]: I1210 13:14:55.861662 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c6a92bb-c309-4db7-89c9-4f6fb2a8c069-combined-ca-bundle\") pod \"openstackclient\" (UID: \"5c6a92bb-c309-4db7-89c9-4f6fb2a8c069\") " pod="openstack/openstackclient" Dec 10 13:14:55 crc kubenswrapper[4921]: I1210 13:14:55.861690 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/5c6a92bb-c309-4db7-89c9-4f6fb2a8c069-openstack-config-secret\") pod \"openstackclient\" (UID: \"5c6a92bb-c309-4db7-89c9-4f6fb2a8c069\") " pod="openstack/openstackclient" Dec 10 13:14:55 crc kubenswrapper[4921]: I1210 13:14:55.863621 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/5c6a92bb-c309-4db7-89c9-4f6fb2a8c069-openstack-config\") pod \"openstackclient\" (UID: \"5c6a92bb-c309-4db7-89c9-4f6fb2a8c069\") " pod="openstack/openstackclient" Dec 10 13:14:55 crc kubenswrapper[4921]: I1210 13:14:55.866099 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/5c6a92bb-c309-4db7-89c9-4f6fb2a8c069-openstack-config-secret\") pod \"openstackclient\" (UID: \"5c6a92bb-c309-4db7-89c9-4f6fb2a8c069\") " pod="openstack/openstackclient" Dec 10 13:14:55 crc kubenswrapper[4921]: I1210 13:14:55.875088 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c6a92bb-c309-4db7-89c9-4f6fb2a8c069-combined-ca-bundle\") pod \"openstackclient\" (UID: \"5c6a92bb-c309-4db7-89c9-4f6fb2a8c069\") " pod="openstack/openstackclient" Dec 10 13:14:55 crc kubenswrapper[4921]: I1210 13:14:55.883512 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gpn4g\" (UniqueName: \"kubernetes.io/projected/5c6a92bb-c309-4db7-89c9-4f6fb2a8c069-kube-api-access-gpn4g\") pod \"openstackclient\" (UID: \"5c6a92bb-c309-4db7-89c9-4f6fb2a8c069\") " pod="openstack/openstackclient" Dec 10 13:14:55 crc kubenswrapper[4921]: I1210 13:14:55.943015 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Dec 10 13:14:56 crc kubenswrapper[4921]: I1210 13:14:56.092562 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"bff237c9-cb6c-4dc1-bdd0-499a5c7f33d2","Type":"ContainerStarted","Data":"1eec7ba1e2147b1af51a892081b984afab6af1dece788d91f402218cdc887690"} Dec 10 13:14:56 crc kubenswrapper[4921]: I1210 13:14:56.093915 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Dec 10 13:14:56 crc kubenswrapper[4921]: I1210 13:14:56.113469 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=3.113434779 podStartE2EDuration="3.113434779s" podCreationTimestamp="2025-12-10 13:14:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 13:14:56.111738904 +0000 UTC m=+1093.327960838" watchObservedRunningTime="2025-12-10 13:14:56.113434779 +0000 UTC m=+1093.329656713" Dec 10 13:14:56 crc kubenswrapper[4921]: I1210 13:14:56.452363 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Dec 10 13:14:56 crc kubenswrapper[4921]: W1210 13:14:56.460052 4921 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5c6a92bb_c309_4db7_89c9_4f6fb2a8c069.slice/crio-7c0da898de23532ced53b5a146f827b20ad6d05d544020a2596c4b3ee8030818 WatchSource:0}: Error finding container 7c0da898de23532ced53b5a146f827b20ad6d05d544020a2596c4b3ee8030818: Status 404 returned error can't find the container with id 7c0da898de23532ced53b5a146f827b20ad6d05d544020a2596c4b3ee8030818 Dec 10 13:14:57 crc kubenswrapper[4921]: I1210 13:14:57.103208 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"5c6a92bb-c309-4db7-89c9-4f6fb2a8c069","Type":"ContainerStarted","Data":"7c0da898de23532ced53b5a146f827b20ad6d05d544020a2596c4b3ee8030818"} Dec 10 13:14:58 crc kubenswrapper[4921]: I1210 13:14:58.636773 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-58db5546cc-xxl5s" Dec 10 13:14:58 crc kubenswrapper[4921]: I1210 13:14:58.688717 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Dec 10 13:14:58 crc kubenswrapper[4921]: I1210 13:14:58.702277 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-869f779d85-gbgn5"] Dec 10 13:14:58 crc kubenswrapper[4921]: I1210 13:14:58.702537 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-869f779d85-gbgn5" podUID="9282d0bd-cea3-4e42-9d5f-778f666aa65a" containerName="dnsmasq-dns" containerID="cri-o://45c0f4a624d35bb92ef11013632cf8f6e37292e2c5fdf886434e207c4744679e" gracePeriod=10 Dec 10 13:14:59 crc kubenswrapper[4921]: I1210 13:14:59.116740 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Dec 10 13:14:59 crc kubenswrapper[4921]: I1210 13:14:59.187403 4921 generic.go:334] "Generic (PLEG): container finished" podID="9282d0bd-cea3-4e42-9d5f-778f666aa65a" containerID="45c0f4a624d35bb92ef11013632cf8f6e37292e2c5fdf886434e207c4744679e" exitCode=0 Dec 10 13:14:59 crc kubenswrapper[4921]: I1210 13:14:59.187463 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-869f779d85-gbgn5" 
event={"ID":"9282d0bd-cea3-4e42-9d5f-778f666aa65a","Type":"ContainerDied","Data":"45c0f4a624d35bb92ef11013632cf8f6e37292e2c5fdf886434e207c4744679e"} Dec 10 13:14:59 crc kubenswrapper[4921]: I1210 13:14:59.284892 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-869f779d85-gbgn5" Dec 10 13:14:59 crc kubenswrapper[4921]: I1210 13:14:59.292531 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 10 13:14:59 crc kubenswrapper[4921]: I1210 13:14:59.466933 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hpbzd\" (UniqueName: \"kubernetes.io/projected/9282d0bd-cea3-4e42-9d5f-778f666aa65a-kube-api-access-hpbzd\") pod \"9282d0bd-cea3-4e42-9d5f-778f666aa65a\" (UID: \"9282d0bd-cea3-4e42-9d5f-778f666aa65a\") " Dec 10 13:14:59 crc kubenswrapper[4921]: I1210 13:14:59.467016 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9282d0bd-cea3-4e42-9d5f-778f666aa65a-dns-svc\") pod \"9282d0bd-cea3-4e42-9d5f-778f666aa65a\" (UID: \"9282d0bd-cea3-4e42-9d5f-778f666aa65a\") " Dec 10 13:14:59 crc kubenswrapper[4921]: I1210 13:14:59.467111 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9282d0bd-cea3-4e42-9d5f-778f666aa65a-ovsdbserver-nb\") pod \"9282d0bd-cea3-4e42-9d5f-778f666aa65a\" (UID: \"9282d0bd-cea3-4e42-9d5f-778f666aa65a\") " Dec 10 13:14:59 crc kubenswrapper[4921]: I1210 13:14:59.467200 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9282d0bd-cea3-4e42-9d5f-778f666aa65a-config\") pod \"9282d0bd-cea3-4e42-9d5f-778f666aa65a\" (UID: \"9282d0bd-cea3-4e42-9d5f-778f666aa65a\") " Dec 10 13:14:59 crc kubenswrapper[4921]: I1210 13:14:59.467218 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9282d0bd-cea3-4e42-9d5f-778f666aa65a-ovsdbserver-sb\") pod \"9282d0bd-cea3-4e42-9d5f-778f666aa65a\" (UID: \"9282d0bd-cea3-4e42-9d5f-778f666aa65a\") " Dec 10 13:14:59 crc kubenswrapper[4921]: I1210 13:14:59.504890 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9282d0bd-cea3-4e42-9d5f-778f666aa65a-kube-api-access-hpbzd" (OuterVolumeSpecName: "kube-api-access-hpbzd") pod "9282d0bd-cea3-4e42-9d5f-778f666aa65a" (UID: "9282d0bd-cea3-4e42-9d5f-778f666aa65a"). InnerVolumeSpecName "kube-api-access-hpbzd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 13:14:59 crc kubenswrapper[4921]: I1210 13:14:59.524785 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9282d0bd-cea3-4e42-9d5f-778f666aa65a-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "9282d0bd-cea3-4e42-9d5f-778f666aa65a" (UID: "9282d0bd-cea3-4e42-9d5f-778f666aa65a"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 13:14:59 crc kubenswrapper[4921]: I1210 13:14:59.534431 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9282d0bd-cea3-4e42-9d5f-778f666aa65a-config" (OuterVolumeSpecName: "config") pod "9282d0bd-cea3-4e42-9d5f-778f666aa65a" (UID: "9282d0bd-cea3-4e42-9d5f-778f666aa65a"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 13:14:59 crc kubenswrapper[4921]: I1210 13:14:59.558758 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9282d0bd-cea3-4e42-9d5f-778f666aa65a-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "9282d0bd-cea3-4e42-9d5f-778f666aa65a" (UID: "9282d0bd-cea3-4e42-9d5f-778f666aa65a"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 13:14:59 crc kubenswrapper[4921]: I1210 13:14:59.559523 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9282d0bd-cea3-4e42-9d5f-778f666aa65a-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "9282d0bd-cea3-4e42-9d5f-778f666aa65a" (UID: "9282d0bd-cea3-4e42-9d5f-778f666aa65a"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 13:14:59 crc kubenswrapper[4921]: I1210 13:14:59.571149 4921 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9282d0bd-cea3-4e42-9d5f-778f666aa65a-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 10 13:14:59 crc kubenswrapper[4921]: I1210 13:14:59.571182 4921 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9282d0bd-cea3-4e42-9d5f-778f666aa65a-config\") on node \"crc\" DevicePath \"\"" Dec 10 13:14:59 crc kubenswrapper[4921]: I1210 13:14:59.571190 4921 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9282d0bd-cea3-4e42-9d5f-778f666aa65a-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 10 13:14:59 crc kubenswrapper[4921]: I1210 13:14:59.571202 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hpbzd\" (UniqueName: \"kubernetes.io/projected/9282d0bd-cea3-4e42-9d5f-778f666aa65a-kube-api-access-hpbzd\") on node \"crc\" DevicePath \"\"" Dec 10 13:14:59 crc kubenswrapper[4921]: I1210 13:14:59.571211 4921 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9282d0bd-cea3-4e42-9d5f-778f666aa65a-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 10 13:15:00 crc kubenswrapper[4921]: I1210 13:15:00.185853 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29422875-zkxsm"] Dec 10 13:15:00 crc kubenswrapper[4921]: E1210 13:15:00.187480 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9282d0bd-cea3-4e42-9d5f-778f666aa65a" containerName="init" Dec 10 13:15:00 crc kubenswrapper[4921]: I1210 13:15:00.200364 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="9282d0bd-cea3-4e42-9d5f-778f666aa65a" containerName="init" Dec 10 13:15:00 crc kubenswrapper[4921]: E1210 13:15:00.200773 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9282d0bd-cea3-4e42-9d5f-778f666aa65a" containerName="dnsmasq-dns" Dec 10 13:15:00 crc kubenswrapper[4921]: I1210 13:15:00.200856 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="9282d0bd-cea3-4e42-9d5f-778f666aa65a" containerName="dnsmasq-dns" Dec 10 13:15:00 crc kubenswrapper[4921]: I1210 13:15:00.201328 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="9282d0bd-cea3-4e42-9d5f-778f666aa65a" containerName="dnsmasq-dns" Dec 10 13:15:00 crc kubenswrapper[4921]: I1210 13:15:00.202996 4921 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openstack/cinder-scheduler-0" podUID="d85ebdfb-8a6b-4630-af89-98a7fc5a6987" containerName="cinder-scheduler" containerID="cri-o://51a773549fb76ff6236746f5c694fb722035182357f6cc549b1a9231699a4c92" gracePeriod=30 Dec 10 13:15:00 crc kubenswrapper[4921]: I1210 13:15:00.203284 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29422875-zkxsm" Dec 10 13:15:00 crc kubenswrapper[4921]: I1210 13:15:00.203579 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="d85ebdfb-8a6b-4630-af89-98a7fc5a6987" containerName="probe" containerID="cri-o://773ed1cdde5df7cb5d4e30de38338384a1322c50be42d364cb919f5960eb0770" gracePeriod=30 Dec 10 13:15:00 crc kubenswrapper[4921]: I1210 13:15:00.203329 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-869f779d85-gbgn5" event={"ID":"9282d0bd-cea3-4e42-9d5f-778f666aa65a","Type":"ContainerDied","Data":"eb99c6fab6077b00e443554c1d92e8a1311ce3bad5d8b459addfdfeda16b1569"} Dec 10 13:15:00 crc kubenswrapper[4921]: I1210 13:15:00.203675 4921 scope.go:117] "RemoveContainer" containerID="45c0f4a624d35bb92ef11013632cf8f6e37292e2c5fdf886434e207c4744679e" Dec 10 13:15:00 crc kubenswrapper[4921]: I1210 13:15:00.203300 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-869f779d85-gbgn5" Dec 10 13:15:00 crc kubenswrapper[4921]: I1210 13:15:00.207211 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 10 13:15:00 crc kubenswrapper[4921]: I1210 13:15:00.209349 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29422875-zkxsm"] Dec 10 13:15:00 crc kubenswrapper[4921]: I1210 13:15:00.216206 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 10 13:15:00 crc kubenswrapper[4921]: I1210 13:15:00.284015 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/16eb1eea-87f2-4fd2-8987-3a6f12d53a6c-config-volume\") pod \"collect-profiles-29422875-zkxsm\" (UID: \"16eb1eea-87f2-4fd2-8987-3a6f12d53a6c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29422875-zkxsm" Dec 10 13:15:00 crc kubenswrapper[4921]: I1210 13:15:00.284212 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/16eb1eea-87f2-4fd2-8987-3a6f12d53a6c-secret-volume\") pod \"collect-profiles-29422875-zkxsm\" (UID: \"16eb1eea-87f2-4fd2-8987-3a6f12d53a6c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29422875-zkxsm" Dec 10 13:15:00 crc kubenswrapper[4921]: I1210 13:15:00.284277 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rc2h5\" (UniqueName: \"kubernetes.io/projected/16eb1eea-87f2-4fd2-8987-3a6f12d53a6c-kube-api-access-rc2h5\") pod \"collect-profiles-29422875-zkxsm\" (UID: \"16eb1eea-87f2-4fd2-8987-3a6f12d53a6c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29422875-zkxsm" Dec 10 13:15:00 crc kubenswrapper[4921]: I1210 13:15:00.318527 4921 scope.go:117] "RemoveContainer" 
containerID="12afae38cbf0087fedac75025736e96a07a2d919bace3df6fb8a8bff614ccb4f" Dec 10 13:15:00 crc kubenswrapper[4921]: I1210 13:15:00.369553 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-869f779d85-gbgn5"] Dec 10 13:15:00 crc kubenswrapper[4921]: I1210 13:15:00.377346 4921 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-869f779d85-gbgn5"] Dec 10 13:15:00 crc kubenswrapper[4921]: I1210 13:15:00.388519 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rc2h5\" (UniqueName: \"kubernetes.io/projected/16eb1eea-87f2-4fd2-8987-3a6f12d53a6c-kube-api-access-rc2h5\") pod \"collect-profiles-29422875-zkxsm\" (UID: \"16eb1eea-87f2-4fd2-8987-3a6f12d53a6c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29422875-zkxsm" Dec 10 13:15:00 crc kubenswrapper[4921]: I1210 13:15:00.388645 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/16eb1eea-87f2-4fd2-8987-3a6f12d53a6c-config-volume\") pod \"collect-profiles-29422875-zkxsm\" (UID: \"16eb1eea-87f2-4fd2-8987-3a6f12d53a6c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29422875-zkxsm" Dec 10 13:15:00 crc kubenswrapper[4921]: I1210 13:15:00.388907 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/16eb1eea-87f2-4fd2-8987-3a6f12d53a6c-secret-volume\") pod \"collect-profiles-29422875-zkxsm\" (UID: \"16eb1eea-87f2-4fd2-8987-3a6f12d53a6c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29422875-zkxsm" Dec 10 13:15:00 crc kubenswrapper[4921]: I1210 13:15:00.390315 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/16eb1eea-87f2-4fd2-8987-3a6f12d53a6c-config-volume\") pod \"collect-profiles-29422875-zkxsm\" (UID: \"16eb1eea-87f2-4fd2-8987-3a6f12d53a6c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29422875-zkxsm" Dec 10 13:15:00 crc kubenswrapper[4921]: I1210 13:15:00.410292 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rc2h5\" (UniqueName: \"kubernetes.io/projected/16eb1eea-87f2-4fd2-8987-3a6f12d53a6c-kube-api-access-rc2h5\") pod \"collect-profiles-29422875-zkxsm\" (UID: \"16eb1eea-87f2-4fd2-8987-3a6f12d53a6c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29422875-zkxsm" Dec 10 13:15:00 crc kubenswrapper[4921]: I1210 13:15:00.418657 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/16eb1eea-87f2-4fd2-8987-3a6f12d53a6c-secret-volume\") pod \"collect-profiles-29422875-zkxsm\" (UID: \"16eb1eea-87f2-4fd2-8987-3a6f12d53a6c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29422875-zkxsm" Dec 10 13:15:00 crc kubenswrapper[4921]: I1210 13:15:00.642301 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29422875-zkxsm" Dec 10 13:15:01 crc kubenswrapper[4921]: I1210 13:15:01.132414 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29422875-zkxsm"] Dec 10 13:15:01 crc kubenswrapper[4921]: I1210 13:15:01.205564 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9282d0bd-cea3-4e42-9d5f-778f666aa65a" path="/var/lib/kubelet/pods/9282d0bd-cea3-4e42-9d5f-778f666aa65a/volumes" Dec 10 13:15:01 crc kubenswrapper[4921]: I1210 13:15:01.214351 4921 generic.go:334] "Generic (PLEG): container finished" podID="d85ebdfb-8a6b-4630-af89-98a7fc5a6987" containerID="51a773549fb76ff6236746f5c694fb722035182357f6cc549b1a9231699a4c92" exitCode=0 Dec 10 13:15:01 crc kubenswrapper[4921]: I1210 13:15:01.214441 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"d85ebdfb-8a6b-4630-af89-98a7fc5a6987","Type":"ContainerDied","Data":"51a773549fb76ff6236746f5c694fb722035182357f6cc549b1a9231699a4c92"} Dec 10 13:15:01 crc kubenswrapper[4921]: I1210 13:15:01.215851 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29422875-zkxsm" event={"ID":"16eb1eea-87f2-4fd2-8987-3a6f12d53a6c","Type":"ContainerStarted","Data":"efb1d40ba01a4a1e2f0afcca780808388b60f5699020e81811e1d71acd695e8a"} Dec 10 13:15:01 crc kubenswrapper[4921]: I1210 13:15:01.878301 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 10 13:15:02 crc kubenswrapper[4921]: I1210 13:15:02.014841 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/d85ebdfb-8a6b-4630-af89-98a7fc5a6987-etc-machine-id\") pod \"d85ebdfb-8a6b-4630-af89-98a7fc5a6987\" (UID: \"d85ebdfb-8a6b-4630-af89-98a7fc5a6987\") " Dec 10 13:15:02 crc kubenswrapper[4921]: I1210 13:15:02.014936 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d85ebdfb-8a6b-4630-af89-98a7fc5a6987-config-data\") pod \"d85ebdfb-8a6b-4630-af89-98a7fc5a6987\" (UID: \"d85ebdfb-8a6b-4630-af89-98a7fc5a6987\") " Dec 10 13:15:02 crc kubenswrapper[4921]: I1210 13:15:02.014957 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d85ebdfb-8a6b-4630-af89-98a7fc5a6987-combined-ca-bundle\") pod \"d85ebdfb-8a6b-4630-af89-98a7fc5a6987\" (UID: \"d85ebdfb-8a6b-4630-af89-98a7fc5a6987\") " Dec 10 13:15:02 crc kubenswrapper[4921]: I1210 13:15:02.015015 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d85ebdfb-8a6b-4630-af89-98a7fc5a6987-scripts\") pod \"d85ebdfb-8a6b-4630-af89-98a7fc5a6987\" (UID: \"d85ebdfb-8a6b-4630-af89-98a7fc5a6987\") " Dec 10 13:15:02 crc kubenswrapper[4921]: I1210 13:15:02.015046 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d85ebdfb-8a6b-4630-af89-98a7fc5a6987-config-data-custom\") pod \"d85ebdfb-8a6b-4630-af89-98a7fc5a6987\" (UID: \"d85ebdfb-8a6b-4630-af89-98a7fc5a6987\") " Dec 10 13:15:02 crc kubenswrapper[4921]: I1210 13:15:02.015087 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m4fwp\" 
(UniqueName: \"kubernetes.io/projected/d85ebdfb-8a6b-4630-af89-98a7fc5a6987-kube-api-access-m4fwp\") pod \"d85ebdfb-8a6b-4630-af89-98a7fc5a6987\" (UID: \"d85ebdfb-8a6b-4630-af89-98a7fc5a6987\") " Dec 10 13:15:02 crc kubenswrapper[4921]: I1210 13:15:02.015333 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d85ebdfb-8a6b-4630-af89-98a7fc5a6987-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "d85ebdfb-8a6b-4630-af89-98a7fc5a6987" (UID: "d85ebdfb-8a6b-4630-af89-98a7fc5a6987"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 13:15:02 crc kubenswrapper[4921]: I1210 13:15:02.016295 4921 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/d85ebdfb-8a6b-4630-af89-98a7fc5a6987-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 10 13:15:02 crc kubenswrapper[4921]: I1210 13:15:02.022534 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d85ebdfb-8a6b-4630-af89-98a7fc5a6987-scripts" (OuterVolumeSpecName: "scripts") pod "d85ebdfb-8a6b-4630-af89-98a7fc5a6987" (UID: "d85ebdfb-8a6b-4630-af89-98a7fc5a6987"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:15:02 crc kubenswrapper[4921]: I1210 13:15:02.032648 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d85ebdfb-8a6b-4630-af89-98a7fc5a6987-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "d85ebdfb-8a6b-4630-af89-98a7fc5a6987" (UID: "d85ebdfb-8a6b-4630-af89-98a7fc5a6987"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:15:02 crc kubenswrapper[4921]: I1210 13:15:02.051715 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d85ebdfb-8a6b-4630-af89-98a7fc5a6987-kube-api-access-m4fwp" (OuterVolumeSpecName: "kube-api-access-m4fwp") pod "d85ebdfb-8a6b-4630-af89-98a7fc5a6987" (UID: "d85ebdfb-8a6b-4630-af89-98a7fc5a6987"). InnerVolumeSpecName "kube-api-access-m4fwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 13:15:02 crc kubenswrapper[4921]: I1210 13:15:02.095628 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d85ebdfb-8a6b-4630-af89-98a7fc5a6987-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d85ebdfb-8a6b-4630-af89-98a7fc5a6987" (UID: "d85ebdfb-8a6b-4630-af89-98a7fc5a6987"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:15:02 crc kubenswrapper[4921]: I1210 13:15:02.118755 4921 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d85ebdfb-8a6b-4630-af89-98a7fc5a6987-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 13:15:02 crc kubenswrapper[4921]: I1210 13:15:02.118801 4921 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d85ebdfb-8a6b-4630-af89-98a7fc5a6987-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 13:15:02 crc kubenswrapper[4921]: I1210 13:15:02.118814 4921 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d85ebdfb-8a6b-4630-af89-98a7fc5a6987-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 10 13:15:02 crc kubenswrapper[4921]: I1210 13:15:02.118825 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m4fwp\" (UniqueName: \"kubernetes.io/projected/d85ebdfb-8a6b-4630-af89-98a7fc5a6987-kube-api-access-m4fwp\") on node \"crc\" DevicePath \"\"" Dec 10 13:15:02 crc kubenswrapper[4921]: I1210 13:15:02.190336 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d85ebdfb-8a6b-4630-af89-98a7fc5a6987-config-data" (OuterVolumeSpecName: "config-data") pod "d85ebdfb-8a6b-4630-af89-98a7fc5a6987" (UID: "d85ebdfb-8a6b-4630-af89-98a7fc5a6987"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:15:02 crc kubenswrapper[4921]: I1210 13:15:02.220159 4921 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d85ebdfb-8a6b-4630-af89-98a7fc5a6987-config-data\") on node \"crc\" DevicePath \"\"" Dec 10 13:15:02 crc kubenswrapper[4921]: I1210 13:15:02.238869 4921 generic.go:334] "Generic (PLEG): container finished" podID="d85ebdfb-8a6b-4630-af89-98a7fc5a6987" containerID="773ed1cdde5df7cb5d4e30de38338384a1322c50be42d364cb919f5960eb0770" exitCode=0 Dec 10 13:15:02 crc kubenswrapper[4921]: I1210 13:15:02.239241 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"d85ebdfb-8a6b-4630-af89-98a7fc5a6987","Type":"ContainerDied","Data":"773ed1cdde5df7cb5d4e30de38338384a1322c50be42d364cb919f5960eb0770"} Dec 10 13:15:02 crc kubenswrapper[4921]: I1210 13:15:02.239335 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"d85ebdfb-8a6b-4630-af89-98a7fc5a6987","Type":"ContainerDied","Data":"bf3c18b4eeecacef80b34c44519ebafd683379f6c29d87a721e082938c8ed40c"} Dec 10 13:15:02 crc kubenswrapper[4921]: I1210 13:15:02.239434 4921 scope.go:117] "RemoveContainer" containerID="773ed1cdde5df7cb5d4e30de38338384a1322c50be42d364cb919f5960eb0770" Dec 10 13:15:02 crc kubenswrapper[4921]: I1210 13:15:02.239799 4921 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 10 13:15:02 crc kubenswrapper[4921]: I1210 13:15:02.245631 4921 generic.go:334] "Generic (PLEG): container finished" podID="16eb1eea-87f2-4fd2-8987-3a6f12d53a6c" containerID="fd7ba3fa4d8bcc38f88f3363ae9e1ac4fc167459ba85e55bcbcdf0a3ae86d612" exitCode=0 Dec 10 13:15:02 crc kubenswrapper[4921]: I1210 13:15:02.245676 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29422875-zkxsm" event={"ID":"16eb1eea-87f2-4fd2-8987-3a6f12d53a6c","Type":"ContainerDied","Data":"fd7ba3fa4d8bcc38f88f3363ae9e1ac4fc167459ba85e55bcbcdf0a3ae86d612"} Dec 10 13:15:02 crc kubenswrapper[4921]: I1210 13:15:02.283761 4921 scope.go:117] "RemoveContainer" containerID="51a773549fb76ff6236746f5c694fb722035182357f6cc549b1a9231699a4c92" Dec 10 13:15:02 crc kubenswrapper[4921]: I1210 13:15:02.289706 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 10 13:15:02 crc kubenswrapper[4921]: I1210 13:15:02.303596 4921 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 10 13:15:02 crc kubenswrapper[4921]: I1210 13:15:02.311120 4921 scope.go:117] "RemoveContainer" containerID="773ed1cdde5df7cb5d4e30de38338384a1322c50be42d364cb919f5960eb0770" Dec 10 13:15:02 crc kubenswrapper[4921]: E1210 13:15:02.315573 4921 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"773ed1cdde5df7cb5d4e30de38338384a1322c50be42d364cb919f5960eb0770\": container with ID starting with 773ed1cdde5df7cb5d4e30de38338384a1322c50be42d364cb919f5960eb0770 not found: ID does not exist" containerID="773ed1cdde5df7cb5d4e30de38338384a1322c50be42d364cb919f5960eb0770" Dec 10 13:15:02 crc kubenswrapper[4921]: I1210 13:15:02.315628 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"773ed1cdde5df7cb5d4e30de38338384a1322c50be42d364cb919f5960eb0770"} err="failed to get container status \"773ed1cdde5df7cb5d4e30de38338384a1322c50be42d364cb919f5960eb0770\": rpc error: code = NotFound desc = could not find container \"773ed1cdde5df7cb5d4e30de38338384a1322c50be42d364cb919f5960eb0770\": container with ID starting with 773ed1cdde5df7cb5d4e30de38338384a1322c50be42d364cb919f5960eb0770 not found: ID does not exist" Dec 10 13:15:02 crc kubenswrapper[4921]: I1210 13:15:02.315658 4921 scope.go:117] "RemoveContainer" containerID="51a773549fb76ff6236746f5c694fb722035182357f6cc549b1a9231699a4c92" Dec 10 13:15:02 crc kubenswrapper[4921]: E1210 13:15:02.317283 4921 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"51a773549fb76ff6236746f5c694fb722035182357f6cc549b1a9231699a4c92\": container with ID starting with 51a773549fb76ff6236746f5c694fb722035182357f6cc549b1a9231699a4c92 not found: ID does not exist" containerID="51a773549fb76ff6236746f5c694fb722035182357f6cc549b1a9231699a4c92" Dec 10 13:15:02 crc kubenswrapper[4921]: I1210 13:15:02.317315 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"51a773549fb76ff6236746f5c694fb722035182357f6cc549b1a9231699a4c92"} err="failed to get container status \"51a773549fb76ff6236746f5c694fb722035182357f6cc549b1a9231699a4c92\": rpc error: code = NotFound desc = could not find container \"51a773549fb76ff6236746f5c694fb722035182357f6cc549b1a9231699a4c92\": container with ID starting with 
51a773549fb76ff6236746f5c694fb722035182357f6cc549b1a9231699a4c92 not found: ID does not exist" Dec 10 13:15:02 crc kubenswrapper[4921]: I1210 13:15:02.328070 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Dec 10 13:15:02 crc kubenswrapper[4921]: E1210 13:15:02.328504 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d85ebdfb-8a6b-4630-af89-98a7fc5a6987" containerName="cinder-scheduler" Dec 10 13:15:02 crc kubenswrapper[4921]: I1210 13:15:02.328522 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="d85ebdfb-8a6b-4630-af89-98a7fc5a6987" containerName="cinder-scheduler" Dec 10 13:15:02 crc kubenswrapper[4921]: E1210 13:15:02.328540 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d85ebdfb-8a6b-4630-af89-98a7fc5a6987" containerName="probe" Dec 10 13:15:02 crc kubenswrapper[4921]: I1210 13:15:02.341303 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="d85ebdfb-8a6b-4630-af89-98a7fc5a6987" containerName="probe" Dec 10 13:15:02 crc kubenswrapper[4921]: I1210 13:15:02.341719 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="d85ebdfb-8a6b-4630-af89-98a7fc5a6987" containerName="cinder-scheduler" Dec 10 13:15:02 crc kubenswrapper[4921]: I1210 13:15:02.341753 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="d85ebdfb-8a6b-4630-af89-98a7fc5a6987" containerName="probe" Dec 10 13:15:02 crc kubenswrapper[4921]: I1210 13:15:02.342650 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 10 13:15:02 crc kubenswrapper[4921]: I1210 13:15:02.352203 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 10 13:15:02 crc kubenswrapper[4921]: I1210 13:15:02.353735 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Dec 10 13:15:02 crc kubenswrapper[4921]: I1210 13:15:02.424358 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c9638f4-5794-40eb-8303-6301fabc3fb3-config-data\") pod \"cinder-scheduler-0\" (UID: \"6c9638f4-5794-40eb-8303-6301fabc3fb3\") " pod="openstack/cinder-scheduler-0" Dec 10 13:15:02 crc kubenswrapper[4921]: I1210 13:15:02.424710 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6c9638f4-5794-40eb-8303-6301fabc3fb3-scripts\") pod \"cinder-scheduler-0\" (UID: \"6c9638f4-5794-40eb-8303-6301fabc3fb3\") " pod="openstack/cinder-scheduler-0" Dec 10 13:15:02 crc kubenswrapper[4921]: I1210 13:15:02.424916 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6c9638f4-5794-40eb-8303-6301fabc3fb3-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"6c9638f4-5794-40eb-8303-6301fabc3fb3\") " pod="openstack/cinder-scheduler-0" Dec 10 13:15:02 crc kubenswrapper[4921]: I1210 13:15:02.424962 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zk7sf\" (UniqueName: \"kubernetes.io/projected/6c9638f4-5794-40eb-8303-6301fabc3fb3-kube-api-access-zk7sf\") pod \"cinder-scheduler-0\" (UID: \"6c9638f4-5794-40eb-8303-6301fabc3fb3\") " pod="openstack/cinder-scheduler-0" Dec 10 13:15:02 crc kubenswrapper[4921]: I1210 13:15:02.425140 4921 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c9638f4-5794-40eb-8303-6301fabc3fb3-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"6c9638f4-5794-40eb-8303-6301fabc3fb3\") " pod="openstack/cinder-scheduler-0" Dec 10 13:15:02 crc kubenswrapper[4921]: I1210 13:15:02.425308 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6c9638f4-5794-40eb-8303-6301fabc3fb3-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"6c9638f4-5794-40eb-8303-6301fabc3fb3\") " pod="openstack/cinder-scheduler-0" Dec 10 13:15:02 crc kubenswrapper[4921]: I1210 13:15:02.527690 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6c9638f4-5794-40eb-8303-6301fabc3fb3-scripts\") pod \"cinder-scheduler-0\" (UID: \"6c9638f4-5794-40eb-8303-6301fabc3fb3\") " pod="openstack/cinder-scheduler-0" Dec 10 13:15:02 crc kubenswrapper[4921]: I1210 13:15:02.527782 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6c9638f4-5794-40eb-8303-6301fabc3fb3-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"6c9638f4-5794-40eb-8303-6301fabc3fb3\") " pod="openstack/cinder-scheduler-0" Dec 10 13:15:02 crc kubenswrapper[4921]: I1210 13:15:02.527801 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zk7sf\" (UniqueName: \"kubernetes.io/projected/6c9638f4-5794-40eb-8303-6301fabc3fb3-kube-api-access-zk7sf\") pod \"cinder-scheduler-0\" (UID: \"6c9638f4-5794-40eb-8303-6301fabc3fb3\") " pod="openstack/cinder-scheduler-0" Dec 10 13:15:02 crc kubenswrapper[4921]: I1210 13:15:02.527835 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c9638f4-5794-40eb-8303-6301fabc3fb3-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"6c9638f4-5794-40eb-8303-6301fabc3fb3\") " pod="openstack/cinder-scheduler-0" Dec 10 13:15:02 crc kubenswrapper[4921]: I1210 13:15:02.527875 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6c9638f4-5794-40eb-8303-6301fabc3fb3-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"6c9638f4-5794-40eb-8303-6301fabc3fb3\") " pod="openstack/cinder-scheduler-0" Dec 10 13:15:02 crc kubenswrapper[4921]: I1210 13:15:02.527915 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c9638f4-5794-40eb-8303-6301fabc3fb3-config-data\") pod \"cinder-scheduler-0\" (UID: \"6c9638f4-5794-40eb-8303-6301fabc3fb3\") " pod="openstack/cinder-scheduler-0" Dec 10 13:15:02 crc kubenswrapper[4921]: I1210 13:15:02.532898 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6c9638f4-5794-40eb-8303-6301fabc3fb3-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"6c9638f4-5794-40eb-8303-6301fabc3fb3\") " pod="openstack/cinder-scheduler-0" Dec 10 13:15:02 crc kubenswrapper[4921]: I1210 13:15:02.535060 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c9638f4-5794-40eb-8303-6301fabc3fb3-config-data\") pod \"cinder-scheduler-0\" 
(UID: \"6c9638f4-5794-40eb-8303-6301fabc3fb3\") " pod="openstack/cinder-scheduler-0" Dec 10 13:15:02 crc kubenswrapper[4921]: I1210 13:15:02.537093 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c9638f4-5794-40eb-8303-6301fabc3fb3-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"6c9638f4-5794-40eb-8303-6301fabc3fb3\") " pod="openstack/cinder-scheduler-0" Dec 10 13:15:02 crc kubenswrapper[4921]: I1210 13:15:02.537270 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6c9638f4-5794-40eb-8303-6301fabc3fb3-scripts\") pod \"cinder-scheduler-0\" (UID: \"6c9638f4-5794-40eb-8303-6301fabc3fb3\") " pod="openstack/cinder-scheduler-0" Dec 10 13:15:02 crc kubenswrapper[4921]: I1210 13:15:02.538877 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6c9638f4-5794-40eb-8303-6301fabc3fb3-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"6c9638f4-5794-40eb-8303-6301fabc3fb3\") " pod="openstack/cinder-scheduler-0" Dec 10 13:15:02 crc kubenswrapper[4921]: I1210 13:15:02.547219 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zk7sf\" (UniqueName: \"kubernetes.io/projected/6c9638f4-5794-40eb-8303-6301fabc3fb3-kube-api-access-zk7sf\") pod \"cinder-scheduler-0\" (UID: \"6c9638f4-5794-40eb-8303-6301fabc3fb3\") " pod="openstack/cinder-scheduler-0" Dec 10 13:15:02 crc kubenswrapper[4921]: I1210 13:15:02.675590 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 10 13:15:02 crc kubenswrapper[4921]: I1210 13:15:02.942256 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 10 13:15:02 crc kubenswrapper[4921]: I1210 13:15:02.950926 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3" containerName="ceilometer-central-agent" containerID="cri-o://006a32c358d67b2a512f882097b0e99c53f37ba3797bbb62383de910c679d1cb" gracePeriod=30 Dec 10 13:15:02 crc kubenswrapper[4921]: I1210 13:15:02.951779 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3" containerName="proxy-httpd" containerID="cri-o://fb206cf0501cc82c3e2ffaff2b457c7b4f872a14a253f62f7f5a083ca621d141" gracePeriod=30 Dec 10 13:15:02 crc kubenswrapper[4921]: I1210 13:15:02.951843 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3" containerName="sg-core" containerID="cri-o://494d73178db963ba95bdf08818a51444e915c8afffa6d8ce10c3493d7ad9d1b6" gracePeriod=30 Dec 10 13:15:02 crc kubenswrapper[4921]: I1210 13:15:02.951888 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3" containerName="ceilometer-notification-agent" containerID="cri-o://02c830af213e1994e4cfd05099da4461a5e2e41437e742d546c59521e54603e9" gracePeriod=30 Dec 10 13:15:02 crc kubenswrapper[4921]: I1210 13:15:02.973280 4921 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3" containerName="proxy-httpd" probeResult="failure" output="Get \"http://10.217.0.148:3000/\": EOF" Dec 
10 13:15:03 crc kubenswrapper[4921]: W1210 13:15:03.208120 4921 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6c9638f4_5794_40eb_8303_6301fabc3fb3.slice/crio-545ad0b623d47a324548bdc1625e87fbe13dcab04af662c733ff00ed29923f5f WatchSource:0}: Error finding container 545ad0b623d47a324548bdc1625e87fbe13dcab04af662c733ff00ed29923f5f: Status 404 returned error can't find the container with id 545ad0b623d47a324548bdc1625e87fbe13dcab04af662c733ff00ed29923f5f Dec 10 13:15:03 crc kubenswrapper[4921]: I1210 13:15:03.243024 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d85ebdfb-8a6b-4630-af89-98a7fc5a6987" path="/var/lib/kubelet/pods/d85ebdfb-8a6b-4630-af89-98a7fc5a6987/volumes" Dec 10 13:15:03 crc kubenswrapper[4921]: I1210 13:15:03.244090 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 10 13:15:03 crc kubenswrapper[4921]: I1210 13:15:03.301674 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"6c9638f4-5794-40eb-8303-6301fabc3fb3","Type":"ContainerStarted","Data":"545ad0b623d47a324548bdc1625e87fbe13dcab04af662c733ff00ed29923f5f"} Dec 10 13:15:03 crc kubenswrapper[4921]: I1210 13:15:03.304799 4921 generic.go:334] "Generic (PLEG): container finished" podID="042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3" containerID="fb206cf0501cc82c3e2ffaff2b457c7b4f872a14a253f62f7f5a083ca621d141" exitCode=0 Dec 10 13:15:03 crc kubenswrapper[4921]: I1210 13:15:03.304823 4921 generic.go:334] "Generic (PLEG): container finished" podID="042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3" containerID="494d73178db963ba95bdf08818a51444e915c8afffa6d8ce10c3493d7ad9d1b6" exitCode=2 Dec 10 13:15:03 crc kubenswrapper[4921]: I1210 13:15:03.304834 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3","Type":"ContainerDied","Data":"fb206cf0501cc82c3e2ffaff2b457c7b4f872a14a253f62f7f5a083ca621d141"} Dec 10 13:15:03 crc kubenswrapper[4921]: I1210 13:15:03.304880 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3","Type":"ContainerDied","Data":"494d73178db963ba95bdf08818a51444e915c8afffa6d8ce10c3493d7ad9d1b6"} Dec 10 13:15:03 crc kubenswrapper[4921]: I1210 13:15:03.755273 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29422875-zkxsm" Dec 10 13:15:03 crc kubenswrapper[4921]: I1210 13:15:03.803814 4921 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-f7c76d556-mfrfz" Dec 10 13:15:03 crc kubenswrapper[4921]: I1210 13:15:03.877524 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/16eb1eea-87f2-4fd2-8987-3a6f12d53a6c-secret-volume\") pod \"16eb1eea-87f2-4fd2-8987-3a6f12d53a6c\" (UID: \"16eb1eea-87f2-4fd2-8987-3a6f12d53a6c\") " Dec 10 13:15:03 crc kubenswrapper[4921]: I1210 13:15:03.877576 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rc2h5\" (UniqueName: \"kubernetes.io/projected/16eb1eea-87f2-4fd2-8987-3a6f12d53a6c-kube-api-access-rc2h5\") pod \"16eb1eea-87f2-4fd2-8987-3a6f12d53a6c\" (UID: \"16eb1eea-87f2-4fd2-8987-3a6f12d53a6c\") " Dec 10 13:15:03 crc kubenswrapper[4921]: I1210 13:15:03.877758 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/16eb1eea-87f2-4fd2-8987-3a6f12d53a6c-config-volume\") pod \"16eb1eea-87f2-4fd2-8987-3a6f12d53a6c\" (UID: \"16eb1eea-87f2-4fd2-8987-3a6f12d53a6c\") " Dec 10 13:15:03 crc kubenswrapper[4921]: I1210 13:15:03.879100 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/16eb1eea-87f2-4fd2-8987-3a6f12d53a6c-config-volume" (OuterVolumeSpecName: "config-volume") pod "16eb1eea-87f2-4fd2-8987-3a6f12d53a6c" (UID: "16eb1eea-87f2-4fd2-8987-3a6f12d53a6c"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 13:15:03 crc kubenswrapper[4921]: I1210 13:15:03.888236 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/16eb1eea-87f2-4fd2-8987-3a6f12d53a6c-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "16eb1eea-87f2-4fd2-8987-3a6f12d53a6c" (UID: "16eb1eea-87f2-4fd2-8987-3a6f12d53a6c"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:15:03 crc kubenswrapper[4921]: I1210 13:15:03.891558 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/16eb1eea-87f2-4fd2-8987-3a6f12d53a6c-kube-api-access-rc2h5" (OuterVolumeSpecName: "kube-api-access-rc2h5") pod "16eb1eea-87f2-4fd2-8987-3a6f12d53a6c" (UID: "16eb1eea-87f2-4fd2-8987-3a6f12d53a6c"). InnerVolumeSpecName "kube-api-access-rc2h5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 13:15:03 crc kubenswrapper[4921]: I1210 13:15:03.979543 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/7c3d07a5-ac22-4395-9027-78255ba114ca-ovndb-tls-certs\") pod \"7c3d07a5-ac22-4395-9027-78255ba114ca\" (UID: \"7c3d07a5-ac22-4395-9027-78255ba114ca\") " Dec 10 13:15:03 crc kubenswrapper[4921]: I1210 13:15:03.979934 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c3d07a5-ac22-4395-9027-78255ba114ca-combined-ca-bundle\") pod \"7c3d07a5-ac22-4395-9027-78255ba114ca\" (UID: \"7c3d07a5-ac22-4395-9027-78255ba114ca\") " Dec 10 13:15:03 crc kubenswrapper[4921]: I1210 13:15:03.979972 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/7c3d07a5-ac22-4395-9027-78255ba114ca-config\") pod \"7c3d07a5-ac22-4395-9027-78255ba114ca\" (UID: \"7c3d07a5-ac22-4395-9027-78255ba114ca\") " Dec 10 13:15:03 crc kubenswrapper[4921]: I1210 13:15:03.979991 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/7c3d07a5-ac22-4395-9027-78255ba114ca-httpd-config\") pod \"7c3d07a5-ac22-4395-9027-78255ba114ca\" (UID: \"7c3d07a5-ac22-4395-9027-78255ba114ca\") " Dec 10 13:15:03 crc kubenswrapper[4921]: I1210 13:15:03.980110 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b868n\" (UniqueName: \"kubernetes.io/projected/7c3d07a5-ac22-4395-9027-78255ba114ca-kube-api-access-b868n\") pod \"7c3d07a5-ac22-4395-9027-78255ba114ca\" (UID: \"7c3d07a5-ac22-4395-9027-78255ba114ca\") " Dec 10 13:15:03 crc kubenswrapper[4921]: I1210 13:15:03.980458 4921 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/16eb1eea-87f2-4fd2-8987-3a6f12d53a6c-config-volume\") on node \"crc\" DevicePath \"\"" Dec 10 13:15:03 crc kubenswrapper[4921]: I1210 13:15:03.980471 4921 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/16eb1eea-87f2-4fd2-8987-3a6f12d53a6c-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 10 13:15:03 crc kubenswrapper[4921]: I1210 13:15:03.980482 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rc2h5\" (UniqueName: \"kubernetes.io/projected/16eb1eea-87f2-4fd2-8987-3a6f12d53a6c-kube-api-access-rc2h5\") on node \"crc\" DevicePath \"\"" Dec 10 13:15:03 crc kubenswrapper[4921]: I1210 13:15:03.988721 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7c3d07a5-ac22-4395-9027-78255ba114ca-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "7c3d07a5-ac22-4395-9027-78255ba114ca" (UID: "7c3d07a5-ac22-4395-9027-78255ba114ca"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:15:03 crc kubenswrapper[4921]: I1210 13:15:03.992656 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7c3d07a5-ac22-4395-9027-78255ba114ca-kube-api-access-b868n" (OuterVolumeSpecName: "kube-api-access-b868n") pod "7c3d07a5-ac22-4395-9027-78255ba114ca" (UID: "7c3d07a5-ac22-4395-9027-78255ba114ca"). InnerVolumeSpecName "kube-api-access-b868n". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 13:15:04 crc kubenswrapper[4921]: I1210 13:15:04.092702 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b868n\" (UniqueName: \"kubernetes.io/projected/7c3d07a5-ac22-4395-9027-78255ba114ca-kube-api-access-b868n\") on node \"crc\" DevicePath \"\"" Dec 10 13:15:04 crc kubenswrapper[4921]: I1210 13:15:04.092739 4921 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/7c3d07a5-ac22-4395-9027-78255ba114ca-httpd-config\") on node \"crc\" DevicePath \"\"" Dec 10 13:15:04 crc kubenswrapper[4921]: I1210 13:15:04.105053 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7c3d07a5-ac22-4395-9027-78255ba114ca-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7c3d07a5-ac22-4395-9027-78255ba114ca" (UID: "7c3d07a5-ac22-4395-9027-78255ba114ca"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:15:04 crc kubenswrapper[4921]: I1210 13:15:04.144704 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7c3d07a5-ac22-4395-9027-78255ba114ca-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "7c3d07a5-ac22-4395-9027-78255ba114ca" (UID: "7c3d07a5-ac22-4395-9027-78255ba114ca"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:15:04 crc kubenswrapper[4921]: I1210 13:15:04.167564 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7c3d07a5-ac22-4395-9027-78255ba114ca-config" (OuterVolumeSpecName: "config") pod "7c3d07a5-ac22-4395-9027-78255ba114ca" (UID: "7c3d07a5-ac22-4395-9027-78255ba114ca"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:15:04 crc kubenswrapper[4921]: I1210 13:15:04.194027 4921 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/7c3d07a5-ac22-4395-9027-78255ba114ca-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 10 13:15:04 crc kubenswrapper[4921]: I1210 13:15:04.194053 4921 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c3d07a5-ac22-4395-9027-78255ba114ca-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 13:15:04 crc kubenswrapper[4921]: I1210 13:15:04.194078 4921 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/7c3d07a5-ac22-4395-9027-78255ba114ca-config\") on node \"crc\" DevicePath \"\"" Dec 10 13:15:04 crc kubenswrapper[4921]: I1210 13:15:04.315304 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29422875-zkxsm" event={"ID":"16eb1eea-87f2-4fd2-8987-3a6f12d53a6c","Type":"ContainerDied","Data":"efb1d40ba01a4a1e2f0afcca780808388b60f5699020e81811e1d71acd695e8a"} Dec 10 13:15:04 crc kubenswrapper[4921]: I1210 13:15:04.315345 4921 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="efb1d40ba01a4a1e2f0afcca780808388b60f5699020e81811e1d71acd695e8a" Dec 10 13:15:04 crc kubenswrapper[4921]: I1210 13:15:04.315468 4921 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29422875-zkxsm" Dec 10 13:15:04 crc kubenswrapper[4921]: I1210 13:15:04.334063 4921 generic.go:334] "Generic (PLEG): container finished" podID="042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3" containerID="006a32c358d67b2a512f882097b0e99c53f37ba3797bbb62383de910c679d1cb" exitCode=0 Dec 10 13:15:04 crc kubenswrapper[4921]: I1210 13:15:04.334165 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3","Type":"ContainerDied","Data":"006a32c358d67b2a512f882097b0e99c53f37ba3797bbb62383de910c679d1cb"} Dec 10 13:15:04 crc kubenswrapper[4921]: I1210 13:15:04.338357 4921 generic.go:334] "Generic (PLEG): container finished" podID="7c3d07a5-ac22-4395-9027-78255ba114ca" containerID="27dee089db8054d40e127e19c6eb01354cf84deecb13d5b3c006979d90831c72" exitCode=0 Dec 10 13:15:04 crc kubenswrapper[4921]: I1210 13:15:04.338493 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-f7c76d556-mfrfz" event={"ID":"7c3d07a5-ac22-4395-9027-78255ba114ca","Type":"ContainerDied","Data":"27dee089db8054d40e127e19c6eb01354cf84deecb13d5b3c006979d90831c72"} Dec 10 13:15:04 crc kubenswrapper[4921]: I1210 13:15:04.338518 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-f7c76d556-mfrfz" event={"ID":"7c3d07a5-ac22-4395-9027-78255ba114ca","Type":"ContainerDied","Data":"24ae530384db7f467e477a62b1177a7093b13a504dbd293a765cf23b5500fc43"} Dec 10 13:15:04 crc kubenswrapper[4921]: I1210 13:15:04.338540 4921 scope.go:117] "RemoveContainer" containerID="c97b1225f3bb2e6097ffaf57a4192666744d1adcbe1fea4d5c0815faff7025ff" Dec 10 13:15:04 crc kubenswrapper[4921]: I1210 13:15:04.339314 4921 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-f7c76d556-mfrfz" Dec 10 13:15:04 crc kubenswrapper[4921]: I1210 13:15:04.391266 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-f7c76d556-mfrfz"] Dec 10 13:15:04 crc kubenswrapper[4921]: I1210 13:15:04.399704 4921 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-f7c76d556-mfrfz"] Dec 10 13:15:04 crc kubenswrapper[4921]: I1210 13:15:04.541206 4921 scope.go:117] "RemoveContainer" containerID="27dee089db8054d40e127e19c6eb01354cf84deecb13d5b3c006979d90831c72" Dec 10 13:15:04 crc kubenswrapper[4921]: I1210 13:15:04.950831 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-w99v8"] Dec 10 13:15:04 crc kubenswrapper[4921]: E1210 13:15:04.951163 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="16eb1eea-87f2-4fd2-8987-3a6f12d53a6c" containerName="collect-profiles" Dec 10 13:15:04 crc kubenswrapper[4921]: I1210 13:15:04.951178 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="16eb1eea-87f2-4fd2-8987-3a6f12d53a6c" containerName="collect-profiles" Dec 10 13:15:04 crc kubenswrapper[4921]: E1210 13:15:04.951209 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c3d07a5-ac22-4395-9027-78255ba114ca" containerName="neutron-httpd" Dec 10 13:15:04 crc kubenswrapper[4921]: I1210 13:15:04.951214 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c3d07a5-ac22-4395-9027-78255ba114ca" containerName="neutron-httpd" Dec 10 13:15:04 crc kubenswrapper[4921]: E1210 13:15:04.951230 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c3d07a5-ac22-4395-9027-78255ba114ca" containerName="neutron-api" Dec 10 13:15:04 crc kubenswrapper[4921]: I1210 13:15:04.951235 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c3d07a5-ac22-4395-9027-78255ba114ca" containerName="neutron-api" Dec 10 13:15:04 crc kubenswrapper[4921]: I1210 13:15:04.951382 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="16eb1eea-87f2-4fd2-8987-3a6f12d53a6c" containerName="collect-profiles" Dec 10 13:15:04 crc kubenswrapper[4921]: I1210 13:15:04.951418 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="7c3d07a5-ac22-4395-9027-78255ba114ca" containerName="neutron-httpd" Dec 10 13:15:04 crc kubenswrapper[4921]: I1210 13:15:04.951431 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="7c3d07a5-ac22-4395-9027-78255ba114ca" containerName="neutron-api" Dec 10 13:15:04 crc kubenswrapper[4921]: I1210 13:15:04.951963 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-w99v8" Dec 10 13:15:04 crc kubenswrapper[4921]: I1210 13:15:04.971464 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-w99v8"] Dec 10 13:15:05 crc kubenswrapper[4921]: I1210 13:15:05.051829 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-rv5zq"] Dec 10 13:15:05 crc kubenswrapper[4921]: I1210 13:15:05.053044 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-rv5zq" Dec 10 13:15:05 crc kubenswrapper[4921]: I1210 13:15:05.074458 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-1e5d-account-create-update-27txp"] Dec 10 13:15:05 crc kubenswrapper[4921]: I1210 13:15:05.079867 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-1e5d-account-create-update-27txp" Dec 10 13:15:05 crc kubenswrapper[4921]: I1210 13:15:05.087209 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret" Dec 10 13:15:05 crc kubenswrapper[4921]: I1210 13:15:05.092829 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-rv5zq"] Dec 10 13:15:05 crc kubenswrapper[4921]: I1210 13:15:05.112354 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fbsqd\" (UniqueName: \"kubernetes.io/projected/53d5a690-c61b-4dfd-be05-b5bfca7f4adf-kube-api-access-fbsqd\") pod \"nova-api-db-create-w99v8\" (UID: \"53d5a690-c61b-4dfd-be05-b5bfca7f4adf\") " pod="openstack/nova-api-db-create-w99v8" Dec 10 13:15:05 crc kubenswrapper[4921]: I1210 13:15:05.112427 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/53d5a690-c61b-4dfd-be05-b5bfca7f4adf-operator-scripts\") pod \"nova-api-db-create-w99v8\" (UID: \"53d5a690-c61b-4dfd-be05-b5bfca7f4adf\") " pod="openstack/nova-api-db-create-w99v8" Dec 10 13:15:05 crc kubenswrapper[4921]: I1210 13:15:05.122658 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-1e5d-account-create-update-27txp"] Dec 10 13:15:05 crc kubenswrapper[4921]: I1210 13:15:05.203491 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7c3d07a5-ac22-4395-9027-78255ba114ca" path="/var/lib/kubelet/pods/7c3d07a5-ac22-4395-9027-78255ba114ca/volumes" Dec 10 13:15:05 crc kubenswrapper[4921]: I1210 13:15:05.216041 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zsnmn\" (UniqueName: \"kubernetes.io/projected/7a270057-fdbe-4726-9b34-6d42b47027fd-kube-api-access-zsnmn\") pod \"nova-cell0-db-create-rv5zq\" (UID: \"7a270057-fdbe-4726-9b34-6d42b47027fd\") " pod="openstack/nova-cell0-db-create-rv5zq" Dec 10 13:15:05 crc kubenswrapper[4921]: I1210 13:15:05.216131 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fbsqd\" (UniqueName: \"kubernetes.io/projected/53d5a690-c61b-4dfd-be05-b5bfca7f4adf-kube-api-access-fbsqd\") pod \"nova-api-db-create-w99v8\" (UID: \"53d5a690-c61b-4dfd-be05-b5bfca7f4adf\") " pod="openstack/nova-api-db-create-w99v8" Dec 10 13:15:05 crc kubenswrapper[4921]: I1210 13:15:05.216163 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/53d5a690-c61b-4dfd-be05-b5bfca7f4adf-operator-scripts\") pod \"nova-api-db-create-w99v8\" (UID: \"53d5a690-c61b-4dfd-be05-b5bfca7f4adf\") " pod="openstack/nova-api-db-create-w99v8" Dec 10 13:15:05 crc kubenswrapper[4921]: I1210 13:15:05.216260 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-scxlq\" (UniqueName: \"kubernetes.io/projected/0837454b-bf93-4505-b41e-6a6c7ddd5793-kube-api-access-scxlq\") pod \"nova-api-1e5d-account-create-update-27txp\" (UID: \"0837454b-bf93-4505-b41e-6a6c7ddd5793\") " pod="openstack/nova-api-1e5d-account-create-update-27txp" Dec 10 13:15:05 crc kubenswrapper[4921]: I1210 13:15:05.216285 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/0837454b-bf93-4505-b41e-6a6c7ddd5793-operator-scripts\") pod \"nova-api-1e5d-account-create-update-27txp\" (UID: \"0837454b-bf93-4505-b41e-6a6c7ddd5793\") " pod="openstack/nova-api-1e5d-account-create-update-27txp" Dec 10 13:15:05 crc kubenswrapper[4921]: I1210 13:15:05.216334 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7a270057-fdbe-4726-9b34-6d42b47027fd-operator-scripts\") pod \"nova-cell0-db-create-rv5zq\" (UID: \"7a270057-fdbe-4726-9b34-6d42b47027fd\") " pod="openstack/nova-cell0-db-create-rv5zq" Dec 10 13:15:05 crc kubenswrapper[4921]: I1210 13:15:05.217331 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/53d5a690-c61b-4dfd-be05-b5bfca7f4adf-operator-scripts\") pod \"nova-api-db-create-w99v8\" (UID: \"53d5a690-c61b-4dfd-be05-b5bfca7f4adf\") " pod="openstack/nova-api-db-create-w99v8" Dec 10 13:15:05 crc kubenswrapper[4921]: I1210 13:15:05.249066 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-pqljd"] Dec 10 13:15:05 crc kubenswrapper[4921]: I1210 13:15:05.250380 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-pqljd" Dec 10 13:15:05 crc kubenswrapper[4921]: I1210 13:15:05.264765 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-pqljd"] Dec 10 13:15:05 crc kubenswrapper[4921]: I1210 13:15:05.268160 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fbsqd\" (UniqueName: \"kubernetes.io/projected/53d5a690-c61b-4dfd-be05-b5bfca7f4adf-kube-api-access-fbsqd\") pod \"nova-api-db-create-w99v8\" (UID: \"53d5a690-c61b-4dfd-be05-b5bfca7f4adf\") " pod="openstack/nova-api-db-create-w99v8" Dec 10 13:15:05 crc kubenswrapper[4921]: I1210 13:15:05.298143 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-fa22-account-create-update-tzvqm"] Dec 10 13:15:05 crc kubenswrapper[4921]: I1210 13:15:05.299191 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-fa22-account-create-update-tzvqm" Dec 10 13:15:05 crc kubenswrapper[4921]: I1210 13:15:05.306741 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret" Dec 10 13:15:05 crc kubenswrapper[4921]: I1210 13:15:05.317971 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-scxlq\" (UniqueName: \"kubernetes.io/projected/0837454b-bf93-4505-b41e-6a6c7ddd5793-kube-api-access-scxlq\") pod \"nova-api-1e5d-account-create-update-27txp\" (UID: \"0837454b-bf93-4505-b41e-6a6c7ddd5793\") " pod="openstack/nova-api-1e5d-account-create-update-27txp" Dec 10 13:15:05 crc kubenswrapper[4921]: I1210 13:15:05.318004 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0837454b-bf93-4505-b41e-6a6c7ddd5793-operator-scripts\") pod \"nova-api-1e5d-account-create-update-27txp\" (UID: \"0837454b-bf93-4505-b41e-6a6c7ddd5793\") " pod="openstack/nova-api-1e5d-account-create-update-27txp" Dec 10 13:15:05 crc kubenswrapper[4921]: I1210 13:15:05.318051 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7a270057-fdbe-4726-9b34-6d42b47027fd-operator-scripts\") pod \"nova-cell0-db-create-rv5zq\" (UID: \"7a270057-fdbe-4726-9b34-6d42b47027fd\") " pod="openstack/nova-cell0-db-create-rv5zq" Dec 10 13:15:05 crc kubenswrapper[4921]: I1210 13:15:05.318104 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zsnmn\" (UniqueName: \"kubernetes.io/projected/7a270057-fdbe-4726-9b34-6d42b47027fd-kube-api-access-zsnmn\") pod \"nova-cell0-db-create-rv5zq\" (UID: \"7a270057-fdbe-4726-9b34-6d42b47027fd\") " pod="openstack/nova-cell0-db-create-rv5zq" Dec 10 13:15:05 crc kubenswrapper[4921]: I1210 13:15:05.319007 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0837454b-bf93-4505-b41e-6a6c7ddd5793-operator-scripts\") pod \"nova-api-1e5d-account-create-update-27txp\" (UID: \"0837454b-bf93-4505-b41e-6a6c7ddd5793\") " pod="openstack/nova-api-1e5d-account-create-update-27txp" Dec 10 13:15:05 crc kubenswrapper[4921]: I1210 13:15:05.319838 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7a270057-fdbe-4726-9b34-6d42b47027fd-operator-scripts\") pod \"nova-cell0-db-create-rv5zq\" (UID: \"7a270057-fdbe-4726-9b34-6d42b47027fd\") " pod="openstack/nova-cell0-db-create-rv5zq" Dec 10 13:15:05 crc kubenswrapper[4921]: I1210 13:15:05.330795 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-fa22-account-create-update-tzvqm"] Dec 10 13:15:05 crc kubenswrapper[4921]: I1210 13:15:05.354428 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-scxlq\" (UniqueName: \"kubernetes.io/projected/0837454b-bf93-4505-b41e-6a6c7ddd5793-kube-api-access-scxlq\") pod \"nova-api-1e5d-account-create-update-27txp\" (UID: \"0837454b-bf93-4505-b41e-6a6c7ddd5793\") " pod="openstack/nova-api-1e5d-account-create-update-27txp" Dec 10 13:15:05 crc kubenswrapper[4921]: I1210 13:15:05.376234 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zsnmn\" (UniqueName: \"kubernetes.io/projected/7a270057-fdbe-4726-9b34-6d42b47027fd-kube-api-access-zsnmn\") pod 
\"nova-cell0-db-create-rv5zq\" (UID: \"7a270057-fdbe-4726-9b34-6d42b47027fd\") " pod="openstack/nova-cell0-db-create-rv5zq" Dec 10 13:15:05 crc kubenswrapper[4921]: I1210 13:15:05.404757 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-rv5zq" Dec 10 13:15:05 crc kubenswrapper[4921]: I1210 13:15:05.413564 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-1e5d-account-create-update-27txp" Dec 10 13:15:05 crc kubenswrapper[4921]: I1210 13:15:05.428103 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zd58b\" (UniqueName: \"kubernetes.io/projected/bca7d85c-d634-42fd-95c4-1b8955ff1ac1-kube-api-access-zd58b\") pod \"nova-cell0-fa22-account-create-update-tzvqm\" (UID: \"bca7d85c-d634-42fd-95c4-1b8955ff1ac1\") " pod="openstack/nova-cell0-fa22-account-create-update-tzvqm" Dec 10 13:15:05 crc kubenswrapper[4921]: I1210 13:15:05.428182 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d43add4e-46f3-4485-815e-363015a4d1b0-operator-scripts\") pod \"nova-cell1-db-create-pqljd\" (UID: \"d43add4e-46f3-4485-815e-363015a4d1b0\") " pod="openstack/nova-cell1-db-create-pqljd" Dec 10 13:15:05 crc kubenswrapper[4921]: I1210 13:15:05.428221 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ppxfz\" (UniqueName: \"kubernetes.io/projected/d43add4e-46f3-4485-815e-363015a4d1b0-kube-api-access-ppxfz\") pod \"nova-cell1-db-create-pqljd\" (UID: \"d43add4e-46f3-4485-815e-363015a4d1b0\") " pod="openstack/nova-cell1-db-create-pqljd" Dec 10 13:15:05 crc kubenswrapper[4921]: I1210 13:15:05.428293 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bca7d85c-d634-42fd-95c4-1b8955ff1ac1-operator-scripts\") pod \"nova-cell0-fa22-account-create-update-tzvqm\" (UID: \"bca7d85c-d634-42fd-95c4-1b8955ff1ac1\") " pod="openstack/nova-cell0-fa22-account-create-update-tzvqm" Dec 10 13:15:05 crc kubenswrapper[4921]: I1210 13:15:05.439639 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"6c9638f4-5794-40eb-8303-6301fabc3fb3","Type":"ContainerStarted","Data":"e0d9b570378c1d4cf9e628c85bf36cb250837e6b4b992a6ac81ea5db7b9d0e0d"} Dec 10 13:15:05 crc kubenswrapper[4921]: I1210 13:15:05.491274 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-86e3-account-create-update-lp5pz"] Dec 10 13:15:05 crc kubenswrapper[4921]: I1210 13:15:05.492504 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-86e3-account-create-update-lp5pz" Dec 10 13:15:05 crc kubenswrapper[4921]: I1210 13:15:05.496965 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret" Dec 10 13:15:05 crc kubenswrapper[4921]: I1210 13:15:05.503412 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-86e3-account-create-update-lp5pz"] Dec 10 13:15:05 crc kubenswrapper[4921]: I1210 13:15:05.529610 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zd58b\" (UniqueName: \"kubernetes.io/projected/bca7d85c-d634-42fd-95c4-1b8955ff1ac1-kube-api-access-zd58b\") pod \"nova-cell0-fa22-account-create-update-tzvqm\" (UID: \"bca7d85c-d634-42fd-95c4-1b8955ff1ac1\") " pod="openstack/nova-cell0-fa22-account-create-update-tzvqm" Dec 10 13:15:05 crc kubenswrapper[4921]: I1210 13:15:05.529689 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d43add4e-46f3-4485-815e-363015a4d1b0-operator-scripts\") pod \"nova-cell1-db-create-pqljd\" (UID: \"d43add4e-46f3-4485-815e-363015a4d1b0\") " pod="openstack/nova-cell1-db-create-pqljd" Dec 10 13:15:05 crc kubenswrapper[4921]: I1210 13:15:05.529769 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ppxfz\" (UniqueName: \"kubernetes.io/projected/d43add4e-46f3-4485-815e-363015a4d1b0-kube-api-access-ppxfz\") pod \"nova-cell1-db-create-pqljd\" (UID: \"d43add4e-46f3-4485-815e-363015a4d1b0\") " pod="openstack/nova-cell1-db-create-pqljd" Dec 10 13:15:05 crc kubenswrapper[4921]: I1210 13:15:05.529872 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bca7d85c-d634-42fd-95c4-1b8955ff1ac1-operator-scripts\") pod \"nova-cell0-fa22-account-create-update-tzvqm\" (UID: \"bca7d85c-d634-42fd-95c4-1b8955ff1ac1\") " pod="openstack/nova-cell0-fa22-account-create-update-tzvqm" Dec 10 13:15:05 crc kubenswrapper[4921]: I1210 13:15:05.530799 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bca7d85c-d634-42fd-95c4-1b8955ff1ac1-operator-scripts\") pod \"nova-cell0-fa22-account-create-update-tzvqm\" (UID: \"bca7d85c-d634-42fd-95c4-1b8955ff1ac1\") " pod="openstack/nova-cell0-fa22-account-create-update-tzvqm" Dec 10 13:15:05 crc kubenswrapper[4921]: I1210 13:15:05.535872 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d43add4e-46f3-4485-815e-363015a4d1b0-operator-scripts\") pod \"nova-cell1-db-create-pqljd\" (UID: \"d43add4e-46f3-4485-815e-363015a4d1b0\") " pod="openstack/nova-cell1-db-create-pqljd" Dec 10 13:15:05 crc kubenswrapper[4921]: I1210 13:15:05.551416 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ppxfz\" (UniqueName: \"kubernetes.io/projected/d43add4e-46f3-4485-815e-363015a4d1b0-kube-api-access-ppxfz\") pod \"nova-cell1-db-create-pqljd\" (UID: \"d43add4e-46f3-4485-815e-363015a4d1b0\") " pod="openstack/nova-cell1-db-create-pqljd" Dec 10 13:15:05 crc kubenswrapper[4921]: I1210 13:15:05.554148 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zd58b\" (UniqueName: \"kubernetes.io/projected/bca7d85c-d634-42fd-95c4-1b8955ff1ac1-kube-api-access-zd58b\") pod 
\"nova-cell0-fa22-account-create-update-tzvqm\" (UID: \"bca7d85c-d634-42fd-95c4-1b8955ff1ac1\") " pod="openstack/nova-cell0-fa22-account-create-update-tzvqm" Dec 10 13:15:05 crc kubenswrapper[4921]: I1210 13:15:05.567993 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-w99v8" Dec 10 13:15:05 crc kubenswrapper[4921]: I1210 13:15:05.633336 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fgfjz\" (UniqueName: \"kubernetes.io/projected/1858d166-0eea-410c-ba8a-69f9968e698e-kube-api-access-fgfjz\") pod \"nova-cell1-86e3-account-create-update-lp5pz\" (UID: \"1858d166-0eea-410c-ba8a-69f9968e698e\") " pod="openstack/nova-cell1-86e3-account-create-update-lp5pz" Dec 10 13:15:05 crc kubenswrapper[4921]: I1210 13:15:05.633381 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1858d166-0eea-410c-ba8a-69f9968e698e-operator-scripts\") pod \"nova-cell1-86e3-account-create-update-lp5pz\" (UID: \"1858d166-0eea-410c-ba8a-69f9968e698e\") " pod="openstack/nova-cell1-86e3-account-create-update-lp5pz" Dec 10 13:15:05 crc kubenswrapper[4921]: I1210 13:15:05.736262 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fgfjz\" (UniqueName: \"kubernetes.io/projected/1858d166-0eea-410c-ba8a-69f9968e698e-kube-api-access-fgfjz\") pod \"nova-cell1-86e3-account-create-update-lp5pz\" (UID: \"1858d166-0eea-410c-ba8a-69f9968e698e\") " pod="openstack/nova-cell1-86e3-account-create-update-lp5pz" Dec 10 13:15:05 crc kubenswrapper[4921]: I1210 13:15:05.736309 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1858d166-0eea-410c-ba8a-69f9968e698e-operator-scripts\") pod \"nova-cell1-86e3-account-create-update-lp5pz\" (UID: \"1858d166-0eea-410c-ba8a-69f9968e698e\") " pod="openstack/nova-cell1-86e3-account-create-update-lp5pz" Dec 10 13:15:05 crc kubenswrapper[4921]: I1210 13:15:05.737575 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1858d166-0eea-410c-ba8a-69f9968e698e-operator-scripts\") pod \"nova-cell1-86e3-account-create-update-lp5pz\" (UID: \"1858d166-0eea-410c-ba8a-69f9968e698e\") " pod="openstack/nova-cell1-86e3-account-create-update-lp5pz" Dec 10 13:15:05 crc kubenswrapper[4921]: I1210 13:15:05.757613 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fgfjz\" (UniqueName: \"kubernetes.io/projected/1858d166-0eea-410c-ba8a-69f9968e698e-kube-api-access-fgfjz\") pod \"nova-cell1-86e3-account-create-update-lp5pz\" (UID: \"1858d166-0eea-410c-ba8a-69f9968e698e\") " pod="openstack/nova-cell1-86e3-account-create-update-lp5pz" Dec 10 13:15:05 crc kubenswrapper[4921]: I1210 13:15:05.821133 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-pqljd" Dec 10 13:15:05 crc kubenswrapper[4921]: I1210 13:15:05.849732 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-fa22-account-create-update-tzvqm" Dec 10 13:15:05 crc kubenswrapper[4921]: I1210 13:15:05.894986 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-86e3-account-create-update-lp5pz" Dec 10 13:15:06 crc kubenswrapper[4921]: I1210 13:15:06.452324 4921 generic.go:334] "Generic (PLEG): container finished" podID="042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3" containerID="02c830af213e1994e4cfd05099da4461a5e2e41437e742d546c59521e54603e9" exitCode=0 Dec 10 13:15:06 crc kubenswrapper[4921]: I1210 13:15:06.452606 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3","Type":"ContainerDied","Data":"02c830af213e1994e4cfd05099da4461a5e2e41437e742d546c59521e54603e9"} Dec 10 13:15:06 crc kubenswrapper[4921]: I1210 13:15:06.756972 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Dec 10 13:15:08 crc kubenswrapper[4921]: I1210 13:15:08.062832 4921 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3" containerName="proxy-httpd" probeResult="failure" output="Get \"http://10.217.0.148:3000/\": dial tcp 10.217.0.148:3000: connect: connection refused" Dec 10 13:15:12 crc kubenswrapper[4921]: I1210 13:15:12.336941 4921 scope.go:117] "RemoveContainer" containerID="c97b1225f3bb2e6097ffaf57a4192666744d1adcbe1fea4d5c0815faff7025ff" Dec 10 13:15:12 crc kubenswrapper[4921]: E1210 13:15:12.355080 4921 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c97b1225f3bb2e6097ffaf57a4192666744d1adcbe1fea4d5c0815faff7025ff\": container with ID starting with c97b1225f3bb2e6097ffaf57a4192666744d1adcbe1fea4d5c0815faff7025ff not found: ID does not exist" containerID="c97b1225f3bb2e6097ffaf57a4192666744d1adcbe1fea4d5c0815faff7025ff" Dec 10 13:15:12 crc kubenswrapper[4921]: I1210 13:15:12.356075 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c97b1225f3bb2e6097ffaf57a4192666744d1adcbe1fea4d5c0815faff7025ff"} err="failed to get container status \"c97b1225f3bb2e6097ffaf57a4192666744d1adcbe1fea4d5c0815faff7025ff\": rpc error: code = NotFound desc = could not find container \"c97b1225f3bb2e6097ffaf57a4192666744d1adcbe1fea4d5c0815faff7025ff\": container with ID starting with c97b1225f3bb2e6097ffaf57a4192666744d1adcbe1fea4d5c0815faff7025ff not found: ID does not exist" Dec 10 13:15:12 crc kubenswrapper[4921]: I1210 13:15:12.356108 4921 scope.go:117] "RemoveContainer" containerID="27dee089db8054d40e127e19c6eb01354cf84deecb13d5b3c006979d90831c72" Dec 10 13:15:12 crc kubenswrapper[4921]: E1210 13:15:12.356843 4921 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"27dee089db8054d40e127e19c6eb01354cf84deecb13d5b3c006979d90831c72\": container with ID starting with 27dee089db8054d40e127e19c6eb01354cf84deecb13d5b3c006979d90831c72 not found: ID does not exist" containerID="27dee089db8054d40e127e19c6eb01354cf84deecb13d5b3c006979d90831c72" Dec 10 13:15:12 crc kubenswrapper[4921]: I1210 13:15:12.356886 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"27dee089db8054d40e127e19c6eb01354cf84deecb13d5b3c006979d90831c72"} err="failed to get container status \"27dee089db8054d40e127e19c6eb01354cf84deecb13d5b3c006979d90831c72\": rpc error: code = NotFound desc = could not find container \"27dee089db8054d40e127e19c6eb01354cf84deecb13d5b3c006979d90831c72\": container with ID starting with 
27dee089db8054d40e127e19c6eb01354cf84deecb13d5b3c006979d90831c72 not found: ID does not exist" Dec 10 13:15:12 crc kubenswrapper[4921]: I1210 13:15:12.796751 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 10 13:15:12 crc kubenswrapper[4921]: I1210 13:15:12.870282 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3-combined-ca-bundle\") pod \"042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3\" (UID: \"042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3\") " Dec 10 13:15:12 crc kubenswrapper[4921]: I1210 13:15:12.870356 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3-log-httpd\") pod \"042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3\" (UID: \"042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3\") " Dec 10 13:15:12 crc kubenswrapper[4921]: I1210 13:15:12.870409 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3-config-data\") pod \"042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3\" (UID: \"042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3\") " Dec 10 13:15:12 crc kubenswrapper[4921]: I1210 13:15:12.870444 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3-run-httpd\") pod \"042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3\" (UID: \"042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3\") " Dec 10 13:15:12 crc kubenswrapper[4921]: I1210 13:15:12.870500 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3-sg-core-conf-yaml\") pod \"042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3\" (UID: \"042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3\") " Dec 10 13:15:12 crc kubenswrapper[4921]: I1210 13:15:12.870543 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3-scripts\") pod \"042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3\" (UID: \"042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3\") " Dec 10 13:15:12 crc kubenswrapper[4921]: I1210 13:15:12.870655 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4xmd5\" (UniqueName: \"kubernetes.io/projected/042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3-kube-api-access-4xmd5\") pod \"042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3\" (UID: \"042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3\") " Dec 10 13:15:12 crc kubenswrapper[4921]: I1210 13:15:12.871720 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3" (UID: "042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 13:15:12 crc kubenswrapper[4921]: I1210 13:15:12.872060 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3" (UID: "042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 13:15:12 crc kubenswrapper[4921]: I1210 13:15:12.878053 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3-kube-api-access-4xmd5" (OuterVolumeSpecName: "kube-api-access-4xmd5") pod "042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3" (UID: "042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3"). InnerVolumeSpecName "kube-api-access-4xmd5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 13:15:12 crc kubenswrapper[4921]: I1210 13:15:12.879149 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3-scripts" (OuterVolumeSpecName: "scripts") pod "042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3" (UID: "042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:15:12 crc kubenswrapper[4921]: I1210 13:15:12.919290 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3" (UID: "042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:15:12 crc kubenswrapper[4921]: I1210 13:15:12.973487 4921 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 10 13:15:12 crc kubenswrapper[4921]: I1210 13:15:12.973517 4921 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 10 13:15:12 crc kubenswrapper[4921]: I1210 13:15:12.973526 4921 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 10 13:15:12 crc kubenswrapper[4921]: I1210 13:15:12.973537 4921 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 13:15:12 crc kubenswrapper[4921]: I1210 13:15:12.973545 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4xmd5\" (UniqueName: \"kubernetes.io/projected/042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3-kube-api-access-4xmd5\") on node \"crc\" DevicePath \"\"" Dec 10 13:15:13 crc kubenswrapper[4921]: I1210 13:15:13.023857 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3" (UID: "042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:15:13 crc kubenswrapper[4921]: I1210 13:15:13.040790 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3-config-data" (OuterVolumeSpecName: "config-data") pod "042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3" (UID: "042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:15:13 crc kubenswrapper[4921]: I1210 13:15:13.076382 4921 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 13:15:13 crc kubenswrapper[4921]: I1210 13:15:13.079808 4921 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3-config-data\") on node \"crc\" DevicePath \"\"" Dec 10 13:15:13 crc kubenswrapper[4921]: I1210 13:15:13.082936 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-rv5zq"] Dec 10 13:15:13 crc kubenswrapper[4921]: W1210 13:15:13.211061 4921 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbca7d85c_d634_42fd_95c4_1b8955ff1ac1.slice/crio-87c03cfd55adf7cb905ef64c93e6ee6cbddefc4ef291a6589ed337b436ac6cb7 WatchSource:0}: Error finding container 87c03cfd55adf7cb905ef64c93e6ee6cbddefc4ef291a6589ed337b436ac6cb7: Status 404 returned error can't find the container with id 87c03cfd55adf7cb905ef64c93e6ee6cbddefc4ef291a6589ed337b436ac6cb7 Dec 10 13:15:13 crc kubenswrapper[4921]: I1210 13:15:13.238128 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-fa22-account-create-update-tzvqm"] Dec 10 13:15:13 crc kubenswrapper[4921]: I1210 13:15:13.411152 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-1e5d-account-create-update-27txp"] Dec 10 13:15:13 crc kubenswrapper[4921]: W1210 13:15:13.411773 4921 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0837454b_bf93_4505_b41e_6a6c7ddd5793.slice/crio-e2a5d8440197d902d915550a1b34bf3ef8c481479fc5c0652b118fb9f446f212 WatchSource:0}: Error finding container e2a5d8440197d902d915550a1b34bf3ef8c481479fc5c0652b118fb9f446f212: Status 404 returned error can't find the container with id e2a5d8440197d902d915550a1b34bf3ef8c481479fc5c0652b118fb9f446f212 Dec 10 13:15:13 crc kubenswrapper[4921]: I1210 13:15:13.429576 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-pqljd"] Dec 10 13:15:13 crc kubenswrapper[4921]: I1210 13:15:13.437510 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-86e3-account-create-update-lp5pz"] Dec 10 13:15:13 crc kubenswrapper[4921]: I1210 13:15:13.469802 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-w99v8"] Dec 10 13:15:13 crc kubenswrapper[4921]: W1210 13:15:13.473867 4921 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod53d5a690_c61b_4dfd_be05_b5bfca7f4adf.slice/crio-195a2ea9132ae92172fc3fc789876bc1e22bb36f7bcedfcdf152183cac7c2cfc WatchSource:0}: Error finding container 195a2ea9132ae92172fc3fc789876bc1e22bb36f7bcedfcdf152183cac7c2cfc: Status 404 returned error can't find the container with id 195a2ea9132ae92172fc3fc789876bc1e22bb36f7bcedfcdf152183cac7c2cfc Dec 10 13:15:13 crc kubenswrapper[4921]: I1210 13:15:13.569028 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-pqljd" event={"ID":"d43add4e-46f3-4485-815e-363015a4d1b0","Type":"ContainerStarted","Data":"e23ed0ed28ed417e88adc2f3a6f65df2a2cc1b5d736f7a105f26edbfc331292a"} Dec 10 
13:15:13 crc kubenswrapper[4921]: I1210 13:15:13.588810 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-rv5zq" event={"ID":"7a270057-fdbe-4726-9b34-6d42b47027fd","Type":"ContainerStarted","Data":"d0e826f63415809cb628deae248d6993ddd3ad7d6d5323b966b965139a1136af"} Dec 10 13:15:13 crc kubenswrapper[4921]: I1210 13:15:13.588857 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-rv5zq" event={"ID":"7a270057-fdbe-4726-9b34-6d42b47027fd","Type":"ContainerStarted","Data":"15e560dba812be7613331013d003ee8a3da32d5869dbe34cff9c244d0dd6fa98"} Dec 10 13:15:13 crc kubenswrapper[4921]: I1210 13:15:13.596865 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"5c6a92bb-c309-4db7-89c9-4f6fb2a8c069","Type":"ContainerStarted","Data":"7909e4a6f645f45bbf87998d83f2474c81e92f8309c20aa90ac34c8970e5ad7f"} Dec 10 13:15:13 crc kubenswrapper[4921]: I1210 13:15:13.603018 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-w99v8" event={"ID":"53d5a690-c61b-4dfd-be05-b5bfca7f4adf","Type":"ContainerStarted","Data":"195a2ea9132ae92172fc3fc789876bc1e22bb36f7bcedfcdf152183cac7c2cfc"} Dec 10 13:15:13 crc kubenswrapper[4921]: I1210 13:15:13.608430 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-db-create-rv5zq" podStartSLOduration=8.608368026 podStartE2EDuration="8.608368026s" podCreationTimestamp="2025-12-10 13:15:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 13:15:13.60253678 +0000 UTC m=+1110.818758714" watchObservedRunningTime="2025-12-10 13:15:13.608368026 +0000 UTC m=+1110.824589950" Dec 10 13:15:13 crc kubenswrapper[4921]: I1210 13:15:13.608750 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-fa22-account-create-update-tzvqm" event={"ID":"bca7d85c-d634-42fd-95c4-1b8955ff1ac1","Type":"ContainerStarted","Data":"4e2009f913f12810e76ace5768c5e89bfca792ea46f2f87f96c5c78fb74c7758"} Dec 10 13:15:13 crc kubenswrapper[4921]: I1210 13:15:13.608784 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-fa22-account-create-update-tzvqm" event={"ID":"bca7d85c-d634-42fd-95c4-1b8955ff1ac1","Type":"ContainerStarted","Data":"87c03cfd55adf7cb905ef64c93e6ee6cbddefc4ef291a6589ed337b436ac6cb7"} Dec 10 13:15:13 crc kubenswrapper[4921]: I1210 13:15:13.633012 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=2.67167592 podStartE2EDuration="18.632983887s" podCreationTimestamp="2025-12-10 13:14:55 +0000 UTC" firstStartedPulling="2025-12-10 13:14:56.466452844 +0000 UTC m=+1093.682674778" lastFinishedPulling="2025-12-10 13:15:12.427760831 +0000 UTC m=+1109.643982745" observedRunningTime="2025-12-10 13:15:13.624717765 +0000 UTC m=+1110.840939689" watchObservedRunningTime="2025-12-10 13:15:13.632983887 +0000 UTC m=+1110.849205821" Dec 10 13:15:13 crc kubenswrapper[4921]: I1210 13:15:13.637106 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3","Type":"ContainerDied","Data":"27067527f96a61d8817fbb9e4f02181d6429092cb85d73c822dda45571282237"} Dec 10 13:15:13 crc kubenswrapper[4921]: I1210 13:15:13.637158 4921 scope.go:117] "RemoveContainer" containerID="fb206cf0501cc82c3e2ffaff2b457c7b4f872a14a253f62f7f5a083ca621d141" Dec 10 
13:15:13 crc kubenswrapper[4921]: I1210 13:15:13.637306 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 10 13:15:13 crc kubenswrapper[4921]: I1210 13:15:13.644201 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-1e5d-account-create-update-27txp" event={"ID":"0837454b-bf93-4505-b41e-6a6c7ddd5793","Type":"ContainerStarted","Data":"e2a5d8440197d902d915550a1b34bf3ef8c481479fc5c0652b118fb9f446f212"} Dec 10 13:15:13 crc kubenswrapper[4921]: I1210 13:15:13.653519 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-fa22-account-create-update-tzvqm" podStartSLOduration=8.653497397 podStartE2EDuration="8.653497397s" podCreationTimestamp="2025-12-10 13:15:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 13:15:13.640688954 +0000 UTC m=+1110.856910878" watchObservedRunningTime="2025-12-10 13:15:13.653497397 +0000 UTC m=+1110.869719321" Dec 10 13:15:13 crc kubenswrapper[4921]: I1210 13:15:13.658113 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-86e3-account-create-update-lp5pz" event={"ID":"1858d166-0eea-410c-ba8a-69f9968e698e","Type":"ContainerStarted","Data":"c281edf00ad416dd668656997aaf2fce10b6beca5234743c206977f466b90aec"} Dec 10 13:15:13 crc kubenswrapper[4921]: I1210 13:15:13.685236 4921 scope.go:117] "RemoveContainer" containerID="494d73178db963ba95bdf08818a51444e915c8afffa6d8ce10c3493d7ad9d1b6" Dec 10 13:15:13 crc kubenswrapper[4921]: I1210 13:15:13.711832 4921 scope.go:117] "RemoveContainer" containerID="02c830af213e1994e4cfd05099da4461a5e2e41437e742d546c59521e54603e9" Dec 10 13:15:13 crc kubenswrapper[4921]: I1210 13:15:13.715336 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 10 13:15:13 crc kubenswrapper[4921]: I1210 13:15:13.727309 4921 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 10 13:15:13 crc kubenswrapper[4921]: I1210 13:15:13.744238 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 10 13:15:13 crc kubenswrapper[4921]: E1210 13:15:13.744641 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3" containerName="ceilometer-notification-agent" Dec 10 13:15:13 crc kubenswrapper[4921]: I1210 13:15:13.744661 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3" containerName="ceilometer-notification-agent" Dec 10 13:15:13 crc kubenswrapper[4921]: E1210 13:15:13.744675 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3" containerName="proxy-httpd" Dec 10 13:15:13 crc kubenswrapper[4921]: I1210 13:15:13.744681 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3" containerName="proxy-httpd" Dec 10 13:15:13 crc kubenswrapper[4921]: E1210 13:15:13.744697 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3" containerName="ceilometer-central-agent" Dec 10 13:15:13 crc kubenswrapper[4921]: I1210 13:15:13.744704 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3" containerName="ceilometer-central-agent" Dec 10 13:15:13 crc kubenswrapper[4921]: E1210 13:15:13.744713 4921 cpu_manager.go:410] "RemoveStaleState: removing 
container" podUID="042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3" containerName="sg-core" Dec 10 13:15:13 crc kubenswrapper[4921]: I1210 13:15:13.744719 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3" containerName="sg-core" Dec 10 13:15:13 crc kubenswrapper[4921]: I1210 13:15:13.744890 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3" containerName="proxy-httpd" Dec 10 13:15:13 crc kubenswrapper[4921]: I1210 13:15:13.744906 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3" containerName="ceilometer-central-agent" Dec 10 13:15:13 crc kubenswrapper[4921]: I1210 13:15:13.744916 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3" containerName="ceilometer-notification-agent" Dec 10 13:15:13 crc kubenswrapper[4921]: I1210 13:15:13.744927 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3" containerName="sg-core" Dec 10 13:15:13 crc kubenswrapper[4921]: I1210 13:15:13.746435 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 10 13:15:13 crc kubenswrapper[4921]: I1210 13:15:13.752501 4921 scope.go:117] "RemoveContainer" containerID="006a32c358d67b2a512f882097b0e99c53f37ba3797bbb62383de910c679d1cb" Dec 10 13:15:13 crc kubenswrapper[4921]: I1210 13:15:13.756750 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 10 13:15:13 crc kubenswrapper[4921]: I1210 13:15:13.756986 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 10 13:15:13 crc kubenswrapper[4921]: I1210 13:15:13.763857 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 10 13:15:13 crc kubenswrapper[4921]: I1210 13:15:13.896903 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b3feec4c-2ede-41fb-874b-47de4c9ff913-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b3feec4c-2ede-41fb-874b-47de4c9ff913\") " pod="openstack/ceilometer-0" Dec 10 13:15:13 crc kubenswrapper[4921]: I1210 13:15:13.896962 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b3feec4c-2ede-41fb-874b-47de4c9ff913-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b3feec4c-2ede-41fb-874b-47de4c9ff913\") " pod="openstack/ceilometer-0" Dec 10 13:15:13 crc kubenswrapper[4921]: I1210 13:15:13.897015 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b3feec4c-2ede-41fb-874b-47de4c9ff913-scripts\") pod \"ceilometer-0\" (UID: \"b3feec4c-2ede-41fb-874b-47de4c9ff913\") " pod="openstack/ceilometer-0" Dec 10 13:15:13 crc kubenswrapper[4921]: I1210 13:15:13.897037 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b3feec4c-2ede-41fb-874b-47de4c9ff913-log-httpd\") pod \"ceilometer-0\" (UID: \"b3feec4c-2ede-41fb-874b-47de4c9ff913\") " pod="openstack/ceilometer-0" Dec 10 13:15:13 crc kubenswrapper[4921]: I1210 13:15:13.897072 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b3feec4c-2ede-41fb-874b-47de4c9ff913-config-data\") pod \"ceilometer-0\" (UID: \"b3feec4c-2ede-41fb-874b-47de4c9ff913\") " pod="openstack/ceilometer-0" Dec 10 13:15:13 crc kubenswrapper[4921]: I1210 13:15:13.897089 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hd9xw\" (UniqueName: \"kubernetes.io/projected/b3feec4c-2ede-41fb-874b-47de4c9ff913-kube-api-access-hd9xw\") pod \"ceilometer-0\" (UID: \"b3feec4c-2ede-41fb-874b-47de4c9ff913\") " pod="openstack/ceilometer-0" Dec 10 13:15:13 crc kubenswrapper[4921]: I1210 13:15:13.897105 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b3feec4c-2ede-41fb-874b-47de4c9ff913-run-httpd\") pod \"ceilometer-0\" (UID: \"b3feec4c-2ede-41fb-874b-47de4c9ff913\") " pod="openstack/ceilometer-0" Dec 10 13:15:13 crc kubenswrapper[4921]: I1210 13:15:13.998771 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b3feec4c-2ede-41fb-874b-47de4c9ff913-log-httpd\") pod \"ceilometer-0\" (UID: \"b3feec4c-2ede-41fb-874b-47de4c9ff913\") " pod="openstack/ceilometer-0" Dec 10 13:15:13 crc kubenswrapper[4921]: I1210 13:15:13.999579 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b3feec4c-2ede-41fb-874b-47de4c9ff913-config-data\") pod \"ceilometer-0\" (UID: \"b3feec4c-2ede-41fb-874b-47de4c9ff913\") " pod="openstack/ceilometer-0" Dec 10 13:15:14 crc kubenswrapper[4921]: I1210 13:15:14.000366 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hd9xw\" (UniqueName: \"kubernetes.io/projected/b3feec4c-2ede-41fb-874b-47de4c9ff913-kube-api-access-hd9xw\") pod \"ceilometer-0\" (UID: \"b3feec4c-2ede-41fb-874b-47de4c9ff913\") " pod="openstack/ceilometer-0" Dec 10 13:15:14 crc kubenswrapper[4921]: I1210 13:15:14.000825 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b3feec4c-2ede-41fb-874b-47de4c9ff913-run-httpd\") pod \"ceilometer-0\" (UID: \"b3feec4c-2ede-41fb-874b-47de4c9ff913\") " pod="openstack/ceilometer-0" Dec 10 13:15:14 crc kubenswrapper[4921]: I1210 13:15:14.000989 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b3feec4c-2ede-41fb-874b-47de4c9ff913-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b3feec4c-2ede-41fb-874b-47de4c9ff913\") " pod="openstack/ceilometer-0" Dec 10 13:15:14 crc kubenswrapper[4921]: I1210 13:15:14.001103 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b3feec4c-2ede-41fb-874b-47de4c9ff913-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b3feec4c-2ede-41fb-874b-47de4c9ff913\") " pod="openstack/ceilometer-0" Dec 10 13:15:14 crc kubenswrapper[4921]: I1210 13:15:14.001236 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b3feec4c-2ede-41fb-874b-47de4c9ff913-scripts\") pod \"ceilometer-0\" (UID: \"b3feec4c-2ede-41fb-874b-47de4c9ff913\") " pod="openstack/ceilometer-0" Dec 10 13:15:14 crc kubenswrapper[4921]: I1210 13:15:14.002613 4921 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b3feec4c-2ede-41fb-874b-47de4c9ff913-run-httpd\") pod \"ceilometer-0\" (UID: \"b3feec4c-2ede-41fb-874b-47de4c9ff913\") " pod="openstack/ceilometer-0" Dec 10 13:15:14 crc kubenswrapper[4921]: I1210 13:15:13.999500 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b3feec4c-2ede-41fb-874b-47de4c9ff913-log-httpd\") pod \"ceilometer-0\" (UID: \"b3feec4c-2ede-41fb-874b-47de4c9ff913\") " pod="openstack/ceilometer-0" Dec 10 13:15:14 crc kubenswrapper[4921]: I1210 13:15:14.026328 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b3feec4c-2ede-41fb-874b-47de4c9ff913-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b3feec4c-2ede-41fb-874b-47de4c9ff913\") " pod="openstack/ceilometer-0" Dec 10 13:15:14 crc kubenswrapper[4921]: I1210 13:15:14.026739 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b3feec4c-2ede-41fb-874b-47de4c9ff913-scripts\") pod \"ceilometer-0\" (UID: \"b3feec4c-2ede-41fb-874b-47de4c9ff913\") " pod="openstack/ceilometer-0" Dec 10 13:15:14 crc kubenswrapper[4921]: I1210 13:15:14.033702 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hd9xw\" (UniqueName: \"kubernetes.io/projected/b3feec4c-2ede-41fb-874b-47de4c9ff913-kube-api-access-hd9xw\") pod \"ceilometer-0\" (UID: \"b3feec4c-2ede-41fb-874b-47de4c9ff913\") " pod="openstack/ceilometer-0" Dec 10 13:15:14 crc kubenswrapper[4921]: I1210 13:15:14.033899 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b3feec4c-2ede-41fb-874b-47de4c9ff913-config-data\") pod \"ceilometer-0\" (UID: \"b3feec4c-2ede-41fb-874b-47de4c9ff913\") " pod="openstack/ceilometer-0" Dec 10 13:15:14 crc kubenswrapper[4921]: I1210 13:15:14.034610 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b3feec4c-2ede-41fb-874b-47de4c9ff913-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b3feec4c-2ede-41fb-874b-47de4c9ff913\") " pod="openstack/ceilometer-0" Dec 10 13:15:14 crc kubenswrapper[4921]: I1210 13:15:14.090271 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 10 13:15:14 crc kubenswrapper[4921]: I1210 13:15:14.638919 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 10 13:15:14 crc kubenswrapper[4921]: I1210 13:15:14.683177 4921 generic.go:334] "Generic (PLEG): container finished" podID="d43add4e-46f3-4485-815e-363015a4d1b0" containerID="864be27166ab66d9bca75e27fe36fc341708f8cbe8db20d9b124fe94033f650a" exitCode=0 Dec 10 13:15:14 crc kubenswrapper[4921]: I1210 13:15:14.683840 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-pqljd" event={"ID":"d43add4e-46f3-4485-815e-363015a4d1b0","Type":"ContainerDied","Data":"864be27166ab66d9bca75e27fe36fc341708f8cbe8db20d9b124fe94033f650a"} Dec 10 13:15:14 crc kubenswrapper[4921]: I1210 13:15:14.690203 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"6c9638f4-5794-40eb-8303-6301fabc3fb3","Type":"ContainerStarted","Data":"7a892258e8caf7486ad289f58e7d26d9bfd1d12195a7528d9d72c79f52c002fb"} Dec 10 13:15:14 crc kubenswrapper[4921]: I1210 13:15:14.701509 4921 generic.go:334] "Generic (PLEG): container finished" podID="bca7d85c-d634-42fd-95c4-1b8955ff1ac1" containerID="4e2009f913f12810e76ace5768c5e89bfca792ea46f2f87f96c5c78fb74c7758" exitCode=0 Dec 10 13:15:14 crc kubenswrapper[4921]: I1210 13:15:14.701861 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-fa22-account-create-update-tzvqm" event={"ID":"bca7d85c-d634-42fd-95c4-1b8955ff1ac1","Type":"ContainerDied","Data":"4e2009f913f12810e76ace5768c5e89bfca792ea46f2f87f96c5c78fb74c7758"} Dec 10 13:15:14 crc kubenswrapper[4921]: I1210 13:15:14.705761 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b3feec4c-2ede-41fb-874b-47de4c9ff913","Type":"ContainerStarted","Data":"e2f8ac05a0aaba09ed3722abe0c9be6c6c8d50fae91d6dadc0c405a82ef39ac1"} Dec 10 13:15:14 crc kubenswrapper[4921]: I1210 13:15:14.708439 4921 generic.go:334] "Generic (PLEG): container finished" podID="0837454b-bf93-4505-b41e-6a6c7ddd5793" containerID="42b53efb98d353e6ce6f226b70dfec2b5db6da89a424777eaec95aaa10a80ed4" exitCode=0 Dec 10 13:15:14 crc kubenswrapper[4921]: I1210 13:15:14.708640 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-1e5d-account-create-update-27txp" event={"ID":"0837454b-bf93-4505-b41e-6a6c7ddd5793","Type":"ContainerDied","Data":"42b53efb98d353e6ce6f226b70dfec2b5db6da89a424777eaec95aaa10a80ed4"} Dec 10 13:15:14 crc kubenswrapper[4921]: I1210 13:15:14.712190 4921 generic.go:334] "Generic (PLEG): container finished" podID="7a270057-fdbe-4726-9b34-6d42b47027fd" containerID="d0e826f63415809cb628deae248d6993ddd3ad7d6d5323b966b965139a1136af" exitCode=0 Dec 10 13:15:14 crc kubenswrapper[4921]: I1210 13:15:14.712339 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-rv5zq" event={"ID":"7a270057-fdbe-4726-9b34-6d42b47027fd","Type":"ContainerDied","Data":"d0e826f63415809cb628deae248d6993ddd3ad7d6d5323b966b965139a1136af"} Dec 10 13:15:14 crc kubenswrapper[4921]: I1210 13:15:14.724750 4921 generic.go:334] "Generic (PLEG): container finished" podID="53d5a690-c61b-4dfd-be05-b5bfca7f4adf" containerID="c5dba9f00fd76539a9ff655700814bb6c564608cdef33cdf30895858ea4debb5" exitCode=0 Dec 10 13:15:14 crc kubenswrapper[4921]: I1210 13:15:14.724860 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-w99v8" 
event={"ID":"53d5a690-c61b-4dfd-be05-b5bfca7f4adf","Type":"ContainerDied","Data":"c5dba9f00fd76539a9ff655700814bb6c564608cdef33cdf30895858ea4debb5"} Dec 10 13:15:14 crc kubenswrapper[4921]: I1210 13:15:14.732496 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=12.732474164 podStartE2EDuration="12.732474164s" podCreationTimestamp="2025-12-10 13:15:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 13:15:14.725256221 +0000 UTC m=+1111.941478155" watchObservedRunningTime="2025-12-10 13:15:14.732474164 +0000 UTC m=+1111.948696098" Dec 10 13:15:14 crc kubenswrapper[4921]: I1210 13:15:14.739555 4921 generic.go:334] "Generic (PLEG): container finished" podID="1858d166-0eea-410c-ba8a-69f9968e698e" containerID="202f6a3dd4ed014edd57d29142e0cf0c000a01ab8f9c0595bf692fee1a0481b3" exitCode=0 Dec 10 13:15:14 crc kubenswrapper[4921]: I1210 13:15:14.740556 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-86e3-account-create-update-lp5pz" event={"ID":"1858d166-0eea-410c-ba8a-69f9968e698e","Type":"ContainerDied","Data":"202f6a3dd4ed014edd57d29142e0cf0c000a01ab8f9c0595bf692fee1a0481b3"} Dec 10 13:15:15 crc kubenswrapper[4921]: I1210 13:15:15.204889 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3" path="/var/lib/kubelet/pods/042f6d1a-e1d3-47ff-b2a5-b5c34b5063f3/volumes" Dec 10 13:15:15 crc kubenswrapper[4921]: I1210 13:15:15.749236 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b3feec4c-2ede-41fb-874b-47de4c9ff913","Type":"ContainerStarted","Data":"718ca4452cd2d02e4517002a13577a5bee05faf5f567d3d75fe0ac4c05ca195b"} Dec 10 13:15:16 crc kubenswrapper[4921]: I1210 13:15:16.710498 4921 patch_prober.go:28] interesting pod/machine-config-daemon-vn2n6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 10 13:15:16 crc kubenswrapper[4921]: I1210 13:15:16.710807 4921 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" podUID="354355f7-6630-49a8-bdc5-5e875feecb7f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 10 13:15:17 crc kubenswrapper[4921]: I1210 13:15:17.387497 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-rv5zq" Dec 10 13:15:17 crc kubenswrapper[4921]: I1210 13:15:17.394246 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-fa22-account-create-update-tzvqm" Dec 10 13:15:17 crc kubenswrapper[4921]: I1210 13:15:17.401158 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-86e3-account-create-update-lp5pz" Dec 10 13:15:17 crc kubenswrapper[4921]: I1210 13:15:17.410080 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-pqljd" Dec 10 13:15:17 crc kubenswrapper[4921]: I1210 13:15:17.415658 4921 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-1e5d-account-create-update-27txp" Dec 10 13:15:17 crc kubenswrapper[4921]: I1210 13:15:17.423507 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-w99v8" Dec 10 13:15:17 crc kubenswrapper[4921]: I1210 13:15:17.466038 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1858d166-0eea-410c-ba8a-69f9968e698e-operator-scripts\") pod \"1858d166-0eea-410c-ba8a-69f9968e698e\" (UID: \"1858d166-0eea-410c-ba8a-69f9968e698e\") " Dec 10 13:15:17 crc kubenswrapper[4921]: I1210 13:15:17.466157 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7a270057-fdbe-4726-9b34-6d42b47027fd-operator-scripts\") pod \"7a270057-fdbe-4726-9b34-6d42b47027fd\" (UID: \"7a270057-fdbe-4726-9b34-6d42b47027fd\") " Dec 10 13:15:17 crc kubenswrapper[4921]: I1210 13:15:17.466251 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zsnmn\" (UniqueName: \"kubernetes.io/projected/7a270057-fdbe-4726-9b34-6d42b47027fd-kube-api-access-zsnmn\") pod \"7a270057-fdbe-4726-9b34-6d42b47027fd\" (UID: \"7a270057-fdbe-4726-9b34-6d42b47027fd\") " Dec 10 13:15:17 crc kubenswrapper[4921]: I1210 13:15:17.466279 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fgfjz\" (UniqueName: \"kubernetes.io/projected/1858d166-0eea-410c-ba8a-69f9968e698e-kube-api-access-fgfjz\") pod \"1858d166-0eea-410c-ba8a-69f9968e698e\" (UID: \"1858d166-0eea-410c-ba8a-69f9968e698e\") " Dec 10 13:15:17 crc kubenswrapper[4921]: I1210 13:15:17.466471 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bca7d85c-d634-42fd-95c4-1b8955ff1ac1-operator-scripts\") pod \"bca7d85c-d634-42fd-95c4-1b8955ff1ac1\" (UID: \"bca7d85c-d634-42fd-95c4-1b8955ff1ac1\") " Dec 10 13:15:17 crc kubenswrapper[4921]: I1210 13:15:17.466540 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zd58b\" (UniqueName: \"kubernetes.io/projected/bca7d85c-d634-42fd-95c4-1b8955ff1ac1-kube-api-access-zd58b\") pod \"bca7d85c-d634-42fd-95c4-1b8955ff1ac1\" (UID: \"bca7d85c-d634-42fd-95c4-1b8955ff1ac1\") " Dec 10 13:15:17 crc kubenswrapper[4921]: I1210 13:15:17.466872 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7a270057-fdbe-4726-9b34-6d42b47027fd-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "7a270057-fdbe-4726-9b34-6d42b47027fd" (UID: "7a270057-fdbe-4726-9b34-6d42b47027fd"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 13:15:17 crc kubenswrapper[4921]: I1210 13:15:17.466886 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1858d166-0eea-410c-ba8a-69f9968e698e-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "1858d166-0eea-410c-ba8a-69f9968e698e" (UID: "1858d166-0eea-410c-ba8a-69f9968e698e"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 13:15:17 crc kubenswrapper[4921]: I1210 13:15:17.467847 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bca7d85c-d634-42fd-95c4-1b8955ff1ac1-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "bca7d85c-d634-42fd-95c4-1b8955ff1ac1" (UID: "bca7d85c-d634-42fd-95c4-1b8955ff1ac1"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 13:15:17 crc kubenswrapper[4921]: I1210 13:15:17.475610 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1858d166-0eea-410c-ba8a-69f9968e698e-kube-api-access-fgfjz" (OuterVolumeSpecName: "kube-api-access-fgfjz") pod "1858d166-0eea-410c-ba8a-69f9968e698e" (UID: "1858d166-0eea-410c-ba8a-69f9968e698e"). InnerVolumeSpecName "kube-api-access-fgfjz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 13:15:17 crc kubenswrapper[4921]: I1210 13:15:17.495583 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7a270057-fdbe-4726-9b34-6d42b47027fd-kube-api-access-zsnmn" (OuterVolumeSpecName: "kube-api-access-zsnmn") pod "7a270057-fdbe-4726-9b34-6d42b47027fd" (UID: "7a270057-fdbe-4726-9b34-6d42b47027fd"). InnerVolumeSpecName "kube-api-access-zsnmn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 13:15:17 crc kubenswrapper[4921]: I1210 13:15:17.501356 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bca7d85c-d634-42fd-95c4-1b8955ff1ac1-kube-api-access-zd58b" (OuterVolumeSpecName: "kube-api-access-zd58b") pod "bca7d85c-d634-42fd-95c4-1b8955ff1ac1" (UID: "bca7d85c-d634-42fd-95c4-1b8955ff1ac1"). InnerVolumeSpecName "kube-api-access-zd58b". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 13:15:17 crc kubenswrapper[4921]: I1210 13:15:17.567790 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d43add4e-46f3-4485-815e-363015a4d1b0-operator-scripts\") pod \"d43add4e-46f3-4485-815e-363015a4d1b0\" (UID: \"d43add4e-46f3-4485-815e-363015a4d1b0\") " Dec 10 13:15:17 crc kubenswrapper[4921]: I1210 13:15:17.567849 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-scxlq\" (UniqueName: \"kubernetes.io/projected/0837454b-bf93-4505-b41e-6a6c7ddd5793-kube-api-access-scxlq\") pod \"0837454b-bf93-4505-b41e-6a6c7ddd5793\" (UID: \"0837454b-bf93-4505-b41e-6a6c7ddd5793\") " Dec 10 13:15:17 crc kubenswrapper[4921]: I1210 13:15:17.567924 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ppxfz\" (UniqueName: \"kubernetes.io/projected/d43add4e-46f3-4485-815e-363015a4d1b0-kube-api-access-ppxfz\") pod \"d43add4e-46f3-4485-815e-363015a4d1b0\" (UID: \"d43add4e-46f3-4485-815e-363015a4d1b0\") " Dec 10 13:15:17 crc kubenswrapper[4921]: I1210 13:15:17.568001 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0837454b-bf93-4505-b41e-6a6c7ddd5793-operator-scripts\") pod \"0837454b-bf93-4505-b41e-6a6c7ddd5793\" (UID: \"0837454b-bf93-4505-b41e-6a6c7ddd5793\") " Dec 10 13:15:17 crc kubenswrapper[4921]: I1210 13:15:17.568025 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fbsqd\" (UniqueName: \"kubernetes.io/projected/53d5a690-c61b-4dfd-be05-b5bfca7f4adf-kube-api-access-fbsqd\") pod \"53d5a690-c61b-4dfd-be05-b5bfca7f4adf\" (UID: \"53d5a690-c61b-4dfd-be05-b5bfca7f4adf\") " Dec 10 13:15:17 crc kubenswrapper[4921]: I1210 13:15:17.568060 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/53d5a690-c61b-4dfd-be05-b5bfca7f4adf-operator-scripts\") pod \"53d5a690-c61b-4dfd-be05-b5bfca7f4adf\" (UID: \"53d5a690-c61b-4dfd-be05-b5bfca7f4adf\") " Dec 10 13:15:17 crc kubenswrapper[4921]: I1210 13:15:17.568327 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d43add4e-46f3-4485-815e-363015a4d1b0-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "d43add4e-46f3-4485-815e-363015a4d1b0" (UID: "d43add4e-46f3-4485-815e-363015a4d1b0"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 13:15:17 crc kubenswrapper[4921]: I1210 13:15:17.568617 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/53d5a690-c61b-4dfd-be05-b5bfca7f4adf-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "53d5a690-c61b-4dfd-be05-b5bfca7f4adf" (UID: "53d5a690-c61b-4dfd-be05-b5bfca7f4adf"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 13:15:17 crc kubenswrapper[4921]: I1210 13:15:17.568973 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0837454b-bf93-4505-b41e-6a6c7ddd5793-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "0837454b-bf93-4505-b41e-6a6c7ddd5793" (UID: "0837454b-bf93-4505-b41e-6a6c7ddd5793"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 13:15:17 crc kubenswrapper[4921]: I1210 13:15:17.569927 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zd58b\" (UniqueName: \"kubernetes.io/projected/bca7d85c-d634-42fd-95c4-1b8955ff1ac1-kube-api-access-zd58b\") on node \"crc\" DevicePath \"\"" Dec 10 13:15:17 crc kubenswrapper[4921]: I1210 13:15:17.570025 4921 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d43add4e-46f3-4485-815e-363015a4d1b0-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 13:15:17 crc kubenswrapper[4921]: I1210 13:15:17.570275 4921 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1858d166-0eea-410c-ba8a-69f9968e698e-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 13:15:17 crc kubenswrapper[4921]: I1210 13:15:17.570360 4921 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7a270057-fdbe-4726-9b34-6d42b47027fd-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 13:15:17 crc kubenswrapper[4921]: I1210 13:15:17.570584 4921 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0837454b-bf93-4505-b41e-6a6c7ddd5793-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 13:15:17 crc kubenswrapper[4921]: I1210 13:15:17.570657 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zsnmn\" (UniqueName: \"kubernetes.io/projected/7a270057-fdbe-4726-9b34-6d42b47027fd-kube-api-access-zsnmn\") on node \"crc\" DevicePath \"\"" Dec 10 13:15:17 crc kubenswrapper[4921]: I1210 13:15:17.570715 4921 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/53d5a690-c61b-4dfd-be05-b5bfca7f4adf-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 13:15:17 crc kubenswrapper[4921]: I1210 13:15:17.570810 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fgfjz\" (UniqueName: \"kubernetes.io/projected/1858d166-0eea-410c-ba8a-69f9968e698e-kube-api-access-fgfjz\") on node \"crc\" DevicePath \"\"" Dec 10 13:15:17 crc kubenswrapper[4921]: I1210 13:15:17.571180 4921 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bca7d85c-d634-42fd-95c4-1b8955ff1ac1-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 13:15:17 crc kubenswrapper[4921]: I1210 13:15:17.571158 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0837454b-bf93-4505-b41e-6a6c7ddd5793-kube-api-access-scxlq" (OuterVolumeSpecName: "kube-api-access-scxlq") pod "0837454b-bf93-4505-b41e-6a6c7ddd5793" (UID: "0837454b-bf93-4505-b41e-6a6c7ddd5793"). InnerVolumeSpecName "kube-api-access-scxlq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 13:15:17 crc kubenswrapper[4921]: I1210 13:15:17.571488 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d43add4e-46f3-4485-815e-363015a4d1b0-kube-api-access-ppxfz" (OuterVolumeSpecName: "kube-api-access-ppxfz") pod "d43add4e-46f3-4485-815e-363015a4d1b0" (UID: "d43add4e-46f3-4485-815e-363015a4d1b0"). InnerVolumeSpecName "kube-api-access-ppxfz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 13:15:17 crc kubenswrapper[4921]: I1210 13:15:17.572623 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/53d5a690-c61b-4dfd-be05-b5bfca7f4adf-kube-api-access-fbsqd" (OuterVolumeSpecName: "kube-api-access-fbsqd") pod "53d5a690-c61b-4dfd-be05-b5bfca7f4adf" (UID: "53d5a690-c61b-4dfd-be05-b5bfca7f4adf"). InnerVolumeSpecName "kube-api-access-fbsqd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 13:15:17 crc kubenswrapper[4921]: I1210 13:15:17.673444 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fbsqd\" (UniqueName: \"kubernetes.io/projected/53d5a690-c61b-4dfd-be05-b5bfca7f4adf-kube-api-access-fbsqd\") on node \"crc\" DevicePath \"\"" Dec 10 13:15:17 crc kubenswrapper[4921]: I1210 13:15:17.673505 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-scxlq\" (UniqueName: \"kubernetes.io/projected/0837454b-bf93-4505-b41e-6a6c7ddd5793-kube-api-access-scxlq\") on node \"crc\" DevicePath \"\"" Dec 10 13:15:17 crc kubenswrapper[4921]: I1210 13:15:17.673519 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ppxfz\" (UniqueName: \"kubernetes.io/projected/d43add4e-46f3-4485-815e-363015a4d1b0-kube-api-access-ppxfz\") on node \"crc\" DevicePath \"\"" Dec 10 13:15:17 crc kubenswrapper[4921]: I1210 13:15:17.676034 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Dec 10 13:15:17 crc kubenswrapper[4921]: I1210 13:15:17.765718 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-pqljd" Dec 10 13:15:17 crc kubenswrapper[4921]: I1210 13:15:17.766503 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-pqljd" event={"ID":"d43add4e-46f3-4485-815e-363015a4d1b0","Type":"ContainerDied","Data":"e23ed0ed28ed417e88adc2f3a6f65df2a2cc1b5d736f7a105f26edbfc331292a"} Dec 10 13:15:17 crc kubenswrapper[4921]: I1210 13:15:17.766608 4921 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e23ed0ed28ed417e88adc2f3a6f65df2a2cc1b5d736f7a105f26edbfc331292a" Dec 10 13:15:17 crc kubenswrapper[4921]: I1210 13:15:17.767841 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-rv5zq" event={"ID":"7a270057-fdbe-4726-9b34-6d42b47027fd","Type":"ContainerDied","Data":"15e560dba812be7613331013d003ee8a3da32d5869dbe34cff9c244d0dd6fa98"} Dec 10 13:15:17 crc kubenswrapper[4921]: I1210 13:15:17.767955 4921 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="15e560dba812be7613331013d003ee8a3da32d5869dbe34cff9c244d0dd6fa98" Dec 10 13:15:17 crc kubenswrapper[4921]: I1210 13:15:17.768037 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-rv5zq" Dec 10 13:15:17 crc kubenswrapper[4921]: I1210 13:15:17.769655 4921 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-w99v8" Dec 10 13:15:17 crc kubenswrapper[4921]: I1210 13:15:17.769671 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-w99v8" event={"ID":"53d5a690-c61b-4dfd-be05-b5bfca7f4adf","Type":"ContainerDied","Data":"195a2ea9132ae92172fc3fc789876bc1e22bb36f7bcedfcdf152183cac7c2cfc"} Dec 10 13:15:17 crc kubenswrapper[4921]: I1210 13:15:17.769900 4921 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="195a2ea9132ae92172fc3fc789876bc1e22bb36f7bcedfcdf152183cac7c2cfc" Dec 10 13:15:17 crc kubenswrapper[4921]: I1210 13:15:17.771195 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-fa22-account-create-update-tzvqm" event={"ID":"bca7d85c-d634-42fd-95c4-1b8955ff1ac1","Type":"ContainerDied","Data":"87c03cfd55adf7cb905ef64c93e6ee6cbddefc4ef291a6589ed337b436ac6cb7"} Dec 10 13:15:17 crc kubenswrapper[4921]: I1210 13:15:17.771224 4921 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="87c03cfd55adf7cb905ef64c93e6ee6cbddefc4ef291a6589ed337b436ac6cb7" Dec 10 13:15:17 crc kubenswrapper[4921]: I1210 13:15:17.771271 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-fa22-account-create-update-tzvqm" Dec 10 13:15:17 crc kubenswrapper[4921]: I1210 13:15:17.781594 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-1e5d-account-create-update-27txp" Dec 10 13:15:17 crc kubenswrapper[4921]: I1210 13:15:17.782413 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-1e5d-account-create-update-27txp" event={"ID":"0837454b-bf93-4505-b41e-6a6c7ddd5793","Type":"ContainerDied","Data":"e2a5d8440197d902d915550a1b34bf3ef8c481479fc5c0652b118fb9f446f212"} Dec 10 13:15:17 crc kubenswrapper[4921]: I1210 13:15:17.782473 4921 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e2a5d8440197d902d915550a1b34bf3ef8c481479fc5c0652b118fb9f446f212" Dec 10 13:15:17 crc kubenswrapper[4921]: I1210 13:15:17.786167 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-86e3-account-create-update-lp5pz" event={"ID":"1858d166-0eea-410c-ba8a-69f9968e698e","Type":"ContainerDied","Data":"c281edf00ad416dd668656997aaf2fce10b6beca5234743c206977f466b90aec"} Dec 10 13:15:17 crc kubenswrapper[4921]: I1210 13:15:17.786209 4921 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c281edf00ad416dd668656997aaf2fce10b6beca5234743c206977f466b90aec" Dec 10 13:15:17 crc kubenswrapper[4921]: I1210 13:15:17.786271 4921 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-86e3-account-create-update-lp5pz" Dec 10 13:15:17 crc kubenswrapper[4921]: I1210 13:15:17.904912 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Dec 10 13:15:18 crc kubenswrapper[4921]: I1210 13:15:18.798189 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b3feec4c-2ede-41fb-874b-47de4c9ff913","Type":"ContainerStarted","Data":"b6bcfff0da08eefbb8b38d10530a1a2aa39c4a59d590b3577fea7756894398ca"} Dec 10 13:15:19 crc kubenswrapper[4921]: I1210 13:15:19.809520 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b3feec4c-2ede-41fb-874b-47de4c9ff913","Type":"ContainerStarted","Data":"94e64cd64f79744e3a538102e5954cae82f3977d8f013eda083401993850f2bc"} Dec 10 13:15:20 crc kubenswrapper[4921]: I1210 13:15:20.509894 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-ftfd5"] Dec 10 13:15:20 crc kubenswrapper[4921]: E1210 13:15:20.510574 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a270057-fdbe-4726-9b34-6d42b47027fd" containerName="mariadb-database-create" Dec 10 13:15:20 crc kubenswrapper[4921]: I1210 13:15:20.510590 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a270057-fdbe-4726-9b34-6d42b47027fd" containerName="mariadb-database-create" Dec 10 13:15:20 crc kubenswrapper[4921]: E1210 13:15:20.510600 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="53d5a690-c61b-4dfd-be05-b5bfca7f4adf" containerName="mariadb-database-create" Dec 10 13:15:20 crc kubenswrapper[4921]: I1210 13:15:20.510606 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="53d5a690-c61b-4dfd-be05-b5bfca7f4adf" containerName="mariadb-database-create" Dec 10 13:15:20 crc kubenswrapper[4921]: E1210 13:15:20.510640 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d43add4e-46f3-4485-815e-363015a4d1b0" containerName="mariadb-database-create" Dec 10 13:15:20 crc kubenswrapper[4921]: I1210 13:15:20.510646 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="d43add4e-46f3-4485-815e-363015a4d1b0" containerName="mariadb-database-create" Dec 10 13:15:20 crc kubenswrapper[4921]: E1210 13:15:20.510660 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bca7d85c-d634-42fd-95c4-1b8955ff1ac1" containerName="mariadb-account-create-update" Dec 10 13:15:20 crc kubenswrapper[4921]: I1210 13:15:20.510666 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="bca7d85c-d634-42fd-95c4-1b8955ff1ac1" containerName="mariadb-account-create-update" Dec 10 13:15:20 crc kubenswrapper[4921]: E1210 13:15:20.510676 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1858d166-0eea-410c-ba8a-69f9968e698e" containerName="mariadb-account-create-update" Dec 10 13:15:20 crc kubenswrapper[4921]: I1210 13:15:20.510681 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="1858d166-0eea-410c-ba8a-69f9968e698e" containerName="mariadb-account-create-update" Dec 10 13:15:20 crc kubenswrapper[4921]: E1210 13:15:20.510690 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0837454b-bf93-4505-b41e-6a6c7ddd5793" containerName="mariadb-account-create-update" Dec 10 13:15:20 crc kubenswrapper[4921]: I1210 13:15:20.510695 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="0837454b-bf93-4505-b41e-6a6c7ddd5793" containerName="mariadb-account-create-update" Dec 10 13:15:20 
crc kubenswrapper[4921]: I1210 13:15:20.510842 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="1858d166-0eea-410c-ba8a-69f9968e698e" containerName="mariadb-account-create-update" Dec 10 13:15:20 crc kubenswrapper[4921]: I1210 13:15:20.510857 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="bca7d85c-d634-42fd-95c4-1b8955ff1ac1" containerName="mariadb-account-create-update" Dec 10 13:15:20 crc kubenswrapper[4921]: I1210 13:15:20.510865 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="53d5a690-c61b-4dfd-be05-b5bfca7f4adf" containerName="mariadb-database-create" Dec 10 13:15:20 crc kubenswrapper[4921]: I1210 13:15:20.510873 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="7a270057-fdbe-4726-9b34-6d42b47027fd" containerName="mariadb-database-create" Dec 10 13:15:20 crc kubenswrapper[4921]: I1210 13:15:20.510896 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="d43add4e-46f3-4485-815e-363015a4d1b0" containerName="mariadb-database-create" Dec 10 13:15:20 crc kubenswrapper[4921]: I1210 13:15:20.510910 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="0837454b-bf93-4505-b41e-6a6c7ddd5793" containerName="mariadb-account-create-update" Dec 10 13:15:20 crc kubenswrapper[4921]: I1210 13:15:20.511476 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-ftfd5" Dec 10 13:15:20 crc kubenswrapper[4921]: I1210 13:15:20.516282 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Dec 10 13:15:20 crc kubenswrapper[4921]: I1210 13:15:20.516481 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts" Dec 10 13:15:20 crc kubenswrapper[4921]: I1210 13:15:20.517635 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-qx9x8" Dec 10 13:15:20 crc kubenswrapper[4921]: I1210 13:15:20.517722 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-ftfd5"] Dec 10 13:15:20 crc kubenswrapper[4921]: I1210 13:15:20.572326 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/54815de8-8713-4fac-b91a-3ee4ecb8a068-config-data\") pod \"nova-cell0-conductor-db-sync-ftfd5\" (UID: \"54815de8-8713-4fac-b91a-3ee4ecb8a068\") " pod="openstack/nova-cell0-conductor-db-sync-ftfd5" Dec 10 13:15:20 crc kubenswrapper[4921]: I1210 13:15:20.572367 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54815de8-8713-4fac-b91a-3ee4ecb8a068-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-ftfd5\" (UID: \"54815de8-8713-4fac-b91a-3ee4ecb8a068\") " pod="openstack/nova-cell0-conductor-db-sync-ftfd5" Dec 10 13:15:20 crc kubenswrapper[4921]: I1210 13:15:20.572680 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/54815de8-8713-4fac-b91a-3ee4ecb8a068-scripts\") pod \"nova-cell0-conductor-db-sync-ftfd5\" (UID: \"54815de8-8713-4fac-b91a-3ee4ecb8a068\") " pod="openstack/nova-cell0-conductor-db-sync-ftfd5" Dec 10 13:15:20 crc kubenswrapper[4921]: I1210 13:15:20.572722 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-nft4r\" (UniqueName: \"kubernetes.io/projected/54815de8-8713-4fac-b91a-3ee4ecb8a068-kube-api-access-nft4r\") pod \"nova-cell0-conductor-db-sync-ftfd5\" (UID: \"54815de8-8713-4fac-b91a-3ee4ecb8a068\") " pod="openstack/nova-cell0-conductor-db-sync-ftfd5" Dec 10 13:15:20 crc kubenswrapper[4921]: I1210 13:15:20.673843 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/54815de8-8713-4fac-b91a-3ee4ecb8a068-scripts\") pod \"nova-cell0-conductor-db-sync-ftfd5\" (UID: \"54815de8-8713-4fac-b91a-3ee4ecb8a068\") " pod="openstack/nova-cell0-conductor-db-sync-ftfd5" Dec 10 13:15:20 crc kubenswrapper[4921]: I1210 13:15:20.673908 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nft4r\" (UniqueName: \"kubernetes.io/projected/54815de8-8713-4fac-b91a-3ee4ecb8a068-kube-api-access-nft4r\") pod \"nova-cell0-conductor-db-sync-ftfd5\" (UID: \"54815de8-8713-4fac-b91a-3ee4ecb8a068\") " pod="openstack/nova-cell0-conductor-db-sync-ftfd5" Dec 10 13:15:20 crc kubenswrapper[4921]: I1210 13:15:20.673962 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/54815de8-8713-4fac-b91a-3ee4ecb8a068-config-data\") pod \"nova-cell0-conductor-db-sync-ftfd5\" (UID: \"54815de8-8713-4fac-b91a-3ee4ecb8a068\") " pod="openstack/nova-cell0-conductor-db-sync-ftfd5" Dec 10 13:15:20 crc kubenswrapper[4921]: I1210 13:15:20.673996 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54815de8-8713-4fac-b91a-3ee4ecb8a068-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-ftfd5\" (UID: \"54815de8-8713-4fac-b91a-3ee4ecb8a068\") " pod="openstack/nova-cell0-conductor-db-sync-ftfd5" Dec 10 13:15:20 crc kubenswrapper[4921]: I1210 13:15:20.680211 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/54815de8-8713-4fac-b91a-3ee4ecb8a068-config-data\") pod \"nova-cell0-conductor-db-sync-ftfd5\" (UID: \"54815de8-8713-4fac-b91a-3ee4ecb8a068\") " pod="openstack/nova-cell0-conductor-db-sync-ftfd5" Dec 10 13:15:20 crc kubenswrapper[4921]: I1210 13:15:20.681794 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/54815de8-8713-4fac-b91a-3ee4ecb8a068-scripts\") pod \"nova-cell0-conductor-db-sync-ftfd5\" (UID: \"54815de8-8713-4fac-b91a-3ee4ecb8a068\") " pod="openstack/nova-cell0-conductor-db-sync-ftfd5" Dec 10 13:15:20 crc kubenswrapper[4921]: I1210 13:15:20.682321 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54815de8-8713-4fac-b91a-3ee4ecb8a068-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-ftfd5\" (UID: \"54815de8-8713-4fac-b91a-3ee4ecb8a068\") " pod="openstack/nova-cell0-conductor-db-sync-ftfd5" Dec 10 13:15:20 crc kubenswrapper[4921]: I1210 13:15:20.694840 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nft4r\" (UniqueName: \"kubernetes.io/projected/54815de8-8713-4fac-b91a-3ee4ecb8a068-kube-api-access-nft4r\") pod \"nova-cell0-conductor-db-sync-ftfd5\" (UID: \"54815de8-8713-4fac-b91a-3ee4ecb8a068\") " pod="openstack/nova-cell0-conductor-db-sync-ftfd5" Dec 10 13:15:20 crc kubenswrapper[4921]: I1210 13:15:20.895514 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-ftfd5" Dec 10 13:15:21 crc kubenswrapper[4921]: I1210 13:15:21.335332 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 10 13:15:21 crc kubenswrapper[4921]: I1210 13:15:21.433713 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-ftfd5"] Dec 10 13:15:21 crc kubenswrapper[4921]: W1210 13:15:21.436237 4921 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod54815de8_8713_4fac_b91a_3ee4ecb8a068.slice/crio-3852d8174c06426de77772712416c187b7ac124020f73f356f05321e3c459226 WatchSource:0}: Error finding container 3852d8174c06426de77772712416c187b7ac124020f73f356f05321e3c459226: Status 404 returned error can't find the container with id 3852d8174c06426de77772712416c187b7ac124020f73f356f05321e3c459226 Dec 10 13:15:21 crc kubenswrapper[4921]: I1210 13:15:21.827567 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-ftfd5" event={"ID":"54815de8-8713-4fac-b91a-3ee4ecb8a068","Type":"ContainerStarted","Data":"3852d8174c06426de77772712416c187b7ac124020f73f356f05321e3c459226"} Dec 10 13:15:24 crc kubenswrapper[4921]: I1210 13:15:24.856584 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b3feec4c-2ede-41fb-874b-47de4c9ff913","Type":"ContainerStarted","Data":"cfbc9f0c233a5f60383d092e8aee019a1d651991ebafe78a8b96255dfc0cca43"} Dec 10 13:15:25 crc kubenswrapper[4921]: I1210 13:15:25.863330 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 10 13:15:25 crc kubenswrapper[4921]: I1210 13:15:25.863360 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b3feec4c-2ede-41fb-874b-47de4c9ff913" containerName="ceilometer-notification-agent" containerID="cri-o://b6bcfff0da08eefbb8b38d10530a1a2aa39c4a59d590b3577fea7756894398ca" gracePeriod=30 Dec 10 13:15:25 crc kubenswrapper[4921]: I1210 13:15:25.864044 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b3feec4c-2ede-41fb-874b-47de4c9ff913" containerName="sg-core" containerID="cri-o://94e64cd64f79744e3a538102e5954cae82f3977d8f013eda083401993850f2bc" gracePeriod=30 Dec 10 13:15:25 crc kubenswrapper[4921]: I1210 13:15:25.864114 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b3feec4c-2ede-41fb-874b-47de4c9ff913" containerName="proxy-httpd" containerID="cri-o://cfbc9f0c233a5f60383d092e8aee019a1d651991ebafe78a8b96255dfc0cca43" gracePeriod=30 Dec 10 13:15:25 crc kubenswrapper[4921]: I1210 13:15:25.864170 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b3feec4c-2ede-41fb-874b-47de4c9ff913" containerName="ceilometer-central-agent" containerID="cri-o://718ca4452cd2d02e4517002a13577a5bee05faf5f567d3d75fe0ac4c05ca195b" gracePeriod=30 Dec 10 13:15:25 crc kubenswrapper[4921]: I1210 13:15:25.901408 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=3.086305778 podStartE2EDuration="12.901377707s" podCreationTimestamp="2025-12-10 13:15:13 +0000 UTC" firstStartedPulling="2025-12-10 13:15:14.655652163 +0000 UTC m=+1111.871874087" lastFinishedPulling="2025-12-10 13:15:24.470724092 +0000 UTC 
m=+1121.686946016" observedRunningTime="2025-12-10 13:15:25.891820731 +0000 UTC m=+1123.108042655" watchObservedRunningTime="2025-12-10 13:15:25.901377707 +0000 UTC m=+1123.117599631" Dec 10 13:15:26 crc kubenswrapper[4921]: I1210 13:15:26.876046 4921 generic.go:334] "Generic (PLEG): container finished" podID="b3feec4c-2ede-41fb-874b-47de4c9ff913" containerID="cfbc9f0c233a5f60383d092e8aee019a1d651991ebafe78a8b96255dfc0cca43" exitCode=0 Dec 10 13:15:26 crc kubenswrapper[4921]: I1210 13:15:26.876358 4921 generic.go:334] "Generic (PLEG): container finished" podID="b3feec4c-2ede-41fb-874b-47de4c9ff913" containerID="94e64cd64f79744e3a538102e5954cae82f3977d8f013eda083401993850f2bc" exitCode=2 Dec 10 13:15:26 crc kubenswrapper[4921]: I1210 13:15:26.876368 4921 generic.go:334] "Generic (PLEG): container finished" podID="b3feec4c-2ede-41fb-874b-47de4c9ff913" containerID="b6bcfff0da08eefbb8b38d10530a1a2aa39c4a59d590b3577fea7756894398ca" exitCode=0 Dec 10 13:15:26 crc kubenswrapper[4921]: I1210 13:15:26.876119 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b3feec4c-2ede-41fb-874b-47de4c9ff913","Type":"ContainerDied","Data":"cfbc9f0c233a5f60383d092e8aee019a1d651991ebafe78a8b96255dfc0cca43"} Dec 10 13:15:26 crc kubenswrapper[4921]: I1210 13:15:26.876453 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b3feec4c-2ede-41fb-874b-47de4c9ff913","Type":"ContainerDied","Data":"94e64cd64f79744e3a538102e5954cae82f3977d8f013eda083401993850f2bc"} Dec 10 13:15:26 crc kubenswrapper[4921]: I1210 13:15:26.876480 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b3feec4c-2ede-41fb-874b-47de4c9ff913","Type":"ContainerDied","Data":"b6bcfff0da08eefbb8b38d10530a1a2aa39c4a59d590b3577fea7756894398ca"} Dec 10 13:15:26 crc kubenswrapper[4921]: I1210 13:15:26.876377 4921 generic.go:334] "Generic (PLEG): container finished" podID="b3feec4c-2ede-41fb-874b-47de4c9ff913" containerID="718ca4452cd2d02e4517002a13577a5bee05faf5f567d3d75fe0ac4c05ca195b" exitCode=0 Dec 10 13:15:26 crc kubenswrapper[4921]: I1210 13:15:26.876491 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b3feec4c-2ede-41fb-874b-47de4c9ff913","Type":"ContainerDied","Data":"718ca4452cd2d02e4517002a13577a5bee05faf5f567d3d75fe0ac4c05ca195b"} Dec 10 13:15:27 crc kubenswrapper[4921]: I1210 13:15:27.665149 4921 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 10 13:15:27 crc kubenswrapper[4921]: I1210 13:15:27.820469 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b3feec4c-2ede-41fb-874b-47de4c9ff913-scripts\") pod \"b3feec4c-2ede-41fb-874b-47de4c9ff913\" (UID: \"b3feec4c-2ede-41fb-874b-47de4c9ff913\") " Dec 10 13:15:27 crc kubenswrapper[4921]: I1210 13:15:27.820556 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b3feec4c-2ede-41fb-874b-47de4c9ff913-config-data\") pod \"b3feec4c-2ede-41fb-874b-47de4c9ff913\" (UID: \"b3feec4c-2ede-41fb-874b-47de4c9ff913\") " Dec 10 13:15:27 crc kubenswrapper[4921]: I1210 13:15:27.820623 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b3feec4c-2ede-41fb-874b-47de4c9ff913-combined-ca-bundle\") pod \"b3feec4c-2ede-41fb-874b-47de4c9ff913\" (UID: \"b3feec4c-2ede-41fb-874b-47de4c9ff913\") " Dec 10 13:15:27 crc kubenswrapper[4921]: I1210 13:15:27.820656 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b3feec4c-2ede-41fb-874b-47de4c9ff913-run-httpd\") pod \"b3feec4c-2ede-41fb-874b-47de4c9ff913\" (UID: \"b3feec4c-2ede-41fb-874b-47de4c9ff913\") " Dec 10 13:15:27 crc kubenswrapper[4921]: I1210 13:15:27.820694 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b3feec4c-2ede-41fb-874b-47de4c9ff913-sg-core-conf-yaml\") pod \"b3feec4c-2ede-41fb-874b-47de4c9ff913\" (UID: \"b3feec4c-2ede-41fb-874b-47de4c9ff913\") " Dec 10 13:15:27 crc kubenswrapper[4921]: I1210 13:15:27.820718 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b3feec4c-2ede-41fb-874b-47de4c9ff913-log-httpd\") pod \"b3feec4c-2ede-41fb-874b-47de4c9ff913\" (UID: \"b3feec4c-2ede-41fb-874b-47de4c9ff913\") " Dec 10 13:15:27 crc kubenswrapper[4921]: I1210 13:15:27.820745 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hd9xw\" (UniqueName: \"kubernetes.io/projected/b3feec4c-2ede-41fb-874b-47de4c9ff913-kube-api-access-hd9xw\") pod \"b3feec4c-2ede-41fb-874b-47de4c9ff913\" (UID: \"b3feec4c-2ede-41fb-874b-47de4c9ff913\") " Dec 10 13:15:27 crc kubenswrapper[4921]: I1210 13:15:27.821287 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b3feec4c-2ede-41fb-874b-47de4c9ff913-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "b3feec4c-2ede-41fb-874b-47de4c9ff913" (UID: "b3feec4c-2ede-41fb-874b-47de4c9ff913"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 13:15:27 crc kubenswrapper[4921]: I1210 13:15:27.821554 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b3feec4c-2ede-41fb-874b-47de4c9ff913-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "b3feec4c-2ede-41fb-874b-47de4c9ff913" (UID: "b3feec4c-2ede-41fb-874b-47de4c9ff913"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 13:15:27 crc kubenswrapper[4921]: I1210 13:15:27.827726 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b3feec4c-2ede-41fb-874b-47de4c9ff913-scripts" (OuterVolumeSpecName: "scripts") pod "b3feec4c-2ede-41fb-874b-47de4c9ff913" (UID: "b3feec4c-2ede-41fb-874b-47de4c9ff913"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:15:27 crc kubenswrapper[4921]: I1210 13:15:27.827902 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b3feec4c-2ede-41fb-874b-47de4c9ff913-kube-api-access-hd9xw" (OuterVolumeSpecName: "kube-api-access-hd9xw") pod "b3feec4c-2ede-41fb-874b-47de4c9ff913" (UID: "b3feec4c-2ede-41fb-874b-47de4c9ff913"). InnerVolumeSpecName "kube-api-access-hd9xw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 13:15:27 crc kubenswrapper[4921]: I1210 13:15:27.849464 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b3feec4c-2ede-41fb-874b-47de4c9ff913-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "b3feec4c-2ede-41fb-874b-47de4c9ff913" (UID: "b3feec4c-2ede-41fb-874b-47de4c9ff913"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:15:27 crc kubenswrapper[4921]: I1210 13:15:27.901216 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b3feec4c-2ede-41fb-874b-47de4c9ff913","Type":"ContainerDied","Data":"e2f8ac05a0aaba09ed3722abe0c9be6c6c8d50fae91d6dadc0c405a82ef39ac1"} Dec 10 13:15:27 crc kubenswrapper[4921]: I1210 13:15:27.901278 4921 scope.go:117] "RemoveContainer" containerID="cfbc9f0c233a5f60383d092e8aee019a1d651991ebafe78a8b96255dfc0cca43" Dec 10 13:15:27 crc kubenswrapper[4921]: I1210 13:15:27.901289 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 10 13:15:27 crc kubenswrapper[4921]: I1210 13:15:27.916940 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b3feec4c-2ede-41fb-874b-47de4c9ff913-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b3feec4c-2ede-41fb-874b-47de4c9ff913" (UID: "b3feec4c-2ede-41fb-874b-47de4c9ff913"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:15:27 crc kubenswrapper[4921]: I1210 13:15:27.923761 4921 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b3feec4c-2ede-41fb-874b-47de4c9ff913-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 13:15:27 crc kubenswrapper[4921]: I1210 13:15:27.923900 4921 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b3feec4c-2ede-41fb-874b-47de4c9ff913-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 10 13:15:27 crc kubenswrapper[4921]: I1210 13:15:27.923989 4921 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b3feec4c-2ede-41fb-874b-47de4c9ff913-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 10 13:15:27 crc kubenswrapper[4921]: I1210 13:15:27.924044 4921 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b3feec4c-2ede-41fb-874b-47de4c9ff913-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 10 13:15:27 crc kubenswrapper[4921]: I1210 13:15:27.924104 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hd9xw\" (UniqueName: \"kubernetes.io/projected/b3feec4c-2ede-41fb-874b-47de4c9ff913-kube-api-access-hd9xw\") on node \"crc\" DevicePath \"\"" Dec 10 13:15:27 crc kubenswrapper[4921]: I1210 13:15:27.924170 4921 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b3feec4c-2ede-41fb-874b-47de4c9ff913-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 13:15:27 crc kubenswrapper[4921]: I1210 13:15:27.941742 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b3feec4c-2ede-41fb-874b-47de4c9ff913-config-data" (OuterVolumeSpecName: "config-data") pod "b3feec4c-2ede-41fb-874b-47de4c9ff913" (UID: "b3feec4c-2ede-41fb-874b-47de4c9ff913"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:15:27 crc kubenswrapper[4921]: I1210 13:15:27.986028 4921 scope.go:117] "RemoveContainer" containerID="94e64cd64f79744e3a538102e5954cae82f3977d8f013eda083401993850f2bc" Dec 10 13:15:28 crc kubenswrapper[4921]: I1210 13:15:28.006165 4921 scope.go:117] "RemoveContainer" containerID="b6bcfff0da08eefbb8b38d10530a1a2aa39c4a59d590b3577fea7756894398ca" Dec 10 13:15:28 crc kubenswrapper[4921]: I1210 13:15:28.025864 4921 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b3feec4c-2ede-41fb-874b-47de4c9ff913-config-data\") on node \"crc\" DevicePath \"\"" Dec 10 13:15:28 crc kubenswrapper[4921]: I1210 13:15:28.026077 4921 scope.go:117] "RemoveContainer" containerID="718ca4452cd2d02e4517002a13577a5bee05faf5f567d3d75fe0ac4c05ca195b" Dec 10 13:15:28 crc kubenswrapper[4921]: I1210 13:15:28.256580 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 10 13:15:28 crc kubenswrapper[4921]: I1210 13:15:28.263889 4921 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 10 13:15:28 crc kubenswrapper[4921]: I1210 13:15:28.288865 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 10 13:15:28 crc kubenswrapper[4921]: E1210 13:15:28.289980 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3feec4c-2ede-41fb-874b-47de4c9ff913" containerName="ceilometer-notification-agent" Dec 10 13:15:28 crc kubenswrapper[4921]: I1210 13:15:28.290076 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3feec4c-2ede-41fb-874b-47de4c9ff913" containerName="ceilometer-notification-agent" Dec 10 13:15:28 crc kubenswrapper[4921]: E1210 13:15:28.290179 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3feec4c-2ede-41fb-874b-47de4c9ff913" containerName="ceilometer-central-agent" Dec 10 13:15:28 crc kubenswrapper[4921]: I1210 13:15:28.290248 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3feec4c-2ede-41fb-874b-47de4c9ff913" containerName="ceilometer-central-agent" Dec 10 13:15:28 crc kubenswrapper[4921]: E1210 13:15:28.290336 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3feec4c-2ede-41fb-874b-47de4c9ff913" containerName="proxy-httpd" Dec 10 13:15:28 crc kubenswrapper[4921]: I1210 13:15:28.290423 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3feec4c-2ede-41fb-874b-47de4c9ff913" containerName="proxy-httpd" Dec 10 13:15:28 crc kubenswrapper[4921]: E1210 13:15:28.290537 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3feec4c-2ede-41fb-874b-47de4c9ff913" containerName="sg-core" Dec 10 13:15:28 crc kubenswrapper[4921]: I1210 13:15:28.290610 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3feec4c-2ede-41fb-874b-47de4c9ff913" containerName="sg-core" Dec 10 13:15:28 crc kubenswrapper[4921]: I1210 13:15:28.290885 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="b3feec4c-2ede-41fb-874b-47de4c9ff913" containerName="sg-core" Dec 10 13:15:28 crc kubenswrapper[4921]: I1210 13:15:28.290963 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="b3feec4c-2ede-41fb-874b-47de4c9ff913" containerName="ceilometer-central-agent" Dec 10 13:15:28 crc kubenswrapper[4921]: I1210 13:15:28.291040 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="b3feec4c-2ede-41fb-874b-47de4c9ff913" containerName="proxy-httpd" Dec 10 13:15:28 crc kubenswrapper[4921]: I1210 
13:15:28.291098 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="b3feec4c-2ede-41fb-874b-47de4c9ff913" containerName="ceilometer-notification-agent" Dec 10 13:15:28 crc kubenswrapper[4921]: I1210 13:15:28.293278 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 10 13:15:28 crc kubenswrapper[4921]: I1210 13:15:28.313605 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 10 13:15:28 crc kubenswrapper[4921]: I1210 13:15:28.314518 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 10 13:15:28 crc kubenswrapper[4921]: I1210 13:15:28.318156 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 10 13:15:28 crc kubenswrapper[4921]: I1210 13:15:28.431653 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c8a5c73f-71a0-4fe2-8d00-aede994aabb0-config-data\") pod \"ceilometer-0\" (UID: \"c8a5c73f-71a0-4fe2-8d00-aede994aabb0\") " pod="openstack/ceilometer-0" Dec 10 13:15:28 crc kubenswrapper[4921]: I1210 13:15:28.431713 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c8a5c73f-71a0-4fe2-8d00-aede994aabb0-log-httpd\") pod \"ceilometer-0\" (UID: \"c8a5c73f-71a0-4fe2-8d00-aede994aabb0\") " pod="openstack/ceilometer-0" Dec 10 13:15:28 crc kubenswrapper[4921]: I1210 13:15:28.431764 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-729mc\" (UniqueName: \"kubernetes.io/projected/c8a5c73f-71a0-4fe2-8d00-aede994aabb0-kube-api-access-729mc\") pod \"ceilometer-0\" (UID: \"c8a5c73f-71a0-4fe2-8d00-aede994aabb0\") " pod="openstack/ceilometer-0" Dec 10 13:15:28 crc kubenswrapper[4921]: I1210 13:15:28.431804 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c8a5c73f-71a0-4fe2-8d00-aede994aabb0-scripts\") pod \"ceilometer-0\" (UID: \"c8a5c73f-71a0-4fe2-8d00-aede994aabb0\") " pod="openstack/ceilometer-0" Dec 10 13:15:28 crc kubenswrapper[4921]: I1210 13:15:28.432028 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c8a5c73f-71a0-4fe2-8d00-aede994aabb0-run-httpd\") pod \"ceilometer-0\" (UID: \"c8a5c73f-71a0-4fe2-8d00-aede994aabb0\") " pod="openstack/ceilometer-0" Dec 10 13:15:28 crc kubenswrapper[4921]: I1210 13:15:28.432097 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c8a5c73f-71a0-4fe2-8d00-aede994aabb0-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c8a5c73f-71a0-4fe2-8d00-aede994aabb0\") " pod="openstack/ceilometer-0" Dec 10 13:15:28 crc kubenswrapper[4921]: I1210 13:15:28.432208 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c8a5c73f-71a0-4fe2-8d00-aede994aabb0-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c8a5c73f-71a0-4fe2-8d00-aede994aabb0\") " pod="openstack/ceilometer-0" Dec 10 13:15:28 crc kubenswrapper[4921]: I1210 13:15:28.534059 4921 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c8a5c73f-71a0-4fe2-8d00-aede994aabb0-config-data\") pod \"ceilometer-0\" (UID: \"c8a5c73f-71a0-4fe2-8d00-aede994aabb0\") " pod="openstack/ceilometer-0"
Dec 10 13:15:28 crc kubenswrapper[4921]: I1210 13:15:28.534119 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c8a5c73f-71a0-4fe2-8d00-aede994aabb0-log-httpd\") pod \"ceilometer-0\" (UID: \"c8a5c73f-71a0-4fe2-8d00-aede994aabb0\") " pod="openstack/ceilometer-0"
Dec 10 13:15:28 crc kubenswrapper[4921]: I1210 13:15:28.534169 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-729mc\" (UniqueName: \"kubernetes.io/projected/c8a5c73f-71a0-4fe2-8d00-aede994aabb0-kube-api-access-729mc\") pod \"ceilometer-0\" (UID: \"c8a5c73f-71a0-4fe2-8d00-aede994aabb0\") " pod="openstack/ceilometer-0"
Dec 10 13:15:28 crc kubenswrapper[4921]: I1210 13:15:28.534213 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c8a5c73f-71a0-4fe2-8d00-aede994aabb0-scripts\") pod \"ceilometer-0\" (UID: \"c8a5c73f-71a0-4fe2-8d00-aede994aabb0\") " pod="openstack/ceilometer-0"
Dec 10 13:15:28 crc kubenswrapper[4921]: I1210 13:15:28.534249 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c8a5c73f-71a0-4fe2-8d00-aede994aabb0-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c8a5c73f-71a0-4fe2-8d00-aede994aabb0\") " pod="openstack/ceilometer-0"
Dec 10 13:15:28 crc kubenswrapper[4921]: I1210 13:15:28.534263 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c8a5c73f-71a0-4fe2-8d00-aede994aabb0-run-httpd\") pod \"ceilometer-0\" (UID: \"c8a5c73f-71a0-4fe2-8d00-aede994aabb0\") " pod="openstack/ceilometer-0"
Dec 10 13:15:28 crc kubenswrapper[4921]: I1210 13:15:28.534292 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c8a5c73f-71a0-4fe2-8d00-aede994aabb0-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c8a5c73f-71a0-4fe2-8d00-aede994aabb0\") " pod="openstack/ceilometer-0"
Dec 10 13:15:28 crc kubenswrapper[4921]: I1210 13:15:28.534882 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c8a5c73f-71a0-4fe2-8d00-aede994aabb0-run-httpd\") pod \"ceilometer-0\" (UID: \"c8a5c73f-71a0-4fe2-8d00-aede994aabb0\") " pod="openstack/ceilometer-0"
Dec 10 13:15:28 crc kubenswrapper[4921]: I1210 13:15:28.535123 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c8a5c73f-71a0-4fe2-8d00-aede994aabb0-log-httpd\") pod \"ceilometer-0\" (UID: \"c8a5c73f-71a0-4fe2-8d00-aede994aabb0\") " pod="openstack/ceilometer-0"
Dec 10 13:15:28 crc kubenswrapper[4921]: I1210 13:15:28.541281 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c8a5c73f-71a0-4fe2-8d00-aede994aabb0-scripts\") pod \"ceilometer-0\" (UID: \"c8a5c73f-71a0-4fe2-8d00-aede994aabb0\") " pod="openstack/ceilometer-0"
Dec 10 13:15:28 crc kubenswrapper[4921]: I1210 13:15:28.541849 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c8a5c73f-71a0-4fe2-8d00-aede994aabb0-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c8a5c73f-71a0-4fe2-8d00-aede994aabb0\") " pod="openstack/ceilometer-0"
Dec 10 13:15:28 crc kubenswrapper[4921]: I1210 13:15:28.544875 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c8a5c73f-71a0-4fe2-8d00-aede994aabb0-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c8a5c73f-71a0-4fe2-8d00-aede994aabb0\") " pod="openstack/ceilometer-0"
Dec 10 13:15:28 crc kubenswrapper[4921]: I1210 13:15:28.545220 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c8a5c73f-71a0-4fe2-8d00-aede994aabb0-config-data\") pod \"ceilometer-0\" (UID: \"c8a5c73f-71a0-4fe2-8d00-aede994aabb0\") " pod="openstack/ceilometer-0"
Dec 10 13:15:28 crc kubenswrapper[4921]: I1210 13:15:28.556512 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-729mc\" (UniqueName: \"kubernetes.io/projected/c8a5c73f-71a0-4fe2-8d00-aede994aabb0-kube-api-access-729mc\") pod \"ceilometer-0\" (UID: \"c8a5c73f-71a0-4fe2-8d00-aede994aabb0\") " pod="openstack/ceilometer-0"
Dec 10 13:15:28 crc kubenswrapper[4921]: I1210 13:15:28.626524 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 10 13:15:29 crc kubenswrapper[4921]: I1210 13:15:29.208290 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b3feec4c-2ede-41fb-874b-47de4c9ff913" path="/var/lib/kubelet/pods/b3feec4c-2ede-41fb-874b-47de4c9ff913/volumes"
Dec 10 13:15:30 crc kubenswrapper[4921]: I1210 13:15:30.526206 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Dec 10 13:15:33 crc kubenswrapper[4921]: W1210 13:15:33.780540 4921 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc8a5c73f_71a0_4fe2_8d00_aede994aabb0.slice/crio-fb76c6a5f2dd2306aeb1883519d89dd4ad761acb4aa194b34573798e42ed92d5 WatchSource:0}: Error finding container fb76c6a5f2dd2306aeb1883519d89dd4ad761acb4aa194b34573798e42ed92d5: Status 404 returned error can't find the container with id fb76c6a5f2dd2306aeb1883519d89dd4ad761acb4aa194b34573798e42ed92d5
Dec 10 13:15:33 crc kubenswrapper[4921]: I1210 13:15:33.791215 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Dec 10 13:15:33 crc kubenswrapper[4921]: I1210 13:15:33.956478 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c8a5c73f-71a0-4fe2-8d00-aede994aabb0","Type":"ContainerStarted","Data":"fb76c6a5f2dd2306aeb1883519d89dd4ad761acb4aa194b34573798e42ed92d5"}
Dec 10 13:15:33 crc kubenswrapper[4921]: I1210 13:15:33.958899 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-ftfd5" event={"ID":"54815de8-8713-4fac-b91a-3ee4ecb8a068","Type":"ContainerStarted","Data":"5f3a96948da2e8010c78c048ec4bbc5f1566e6effc5adcd18ccbd75b69386c1f"}
Dec 10 13:15:33 crc kubenswrapper[4921]: I1210 13:15:33.979508 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-ftfd5" podStartSLOduration=2.006860338 podStartE2EDuration="13.979488541s" podCreationTimestamp="2025-12-10 13:15:20 +0000 UTC" firstStartedPulling="2025-12-10 13:15:21.438320561 +0000 UTC m=+1118.654542485" lastFinishedPulling="2025-12-10 13:15:33.410948764 +0000 UTC m=+1130.627170688" observedRunningTime="2025-12-10 13:15:33.978829003 +0000 UTC m=+1131.195050937" watchObservedRunningTime="2025-12-10 13:15:33.979488541 +0000 UTC m=+1131.195710475"
Dec 10 13:15:34 crc kubenswrapper[4921]: I1210 13:15:34.998835 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c8a5c73f-71a0-4fe2-8d00-aede994aabb0","Type":"ContainerStarted","Data":"04d2b0152557bff4c262c6ece2578206242c0ff9eb5e60df2b92653f72bf0ff7"}
Dec 10 13:15:36 crc kubenswrapper[4921]: I1210 13:15:36.007271 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c8a5c73f-71a0-4fe2-8d00-aede994aabb0","Type":"ContainerStarted","Data":"3a6ce5364e2125cae48a3769556f1205b5289746cebcd7c28690975af82797f9"}
Dec 10 13:15:37 crc kubenswrapper[4921]: I1210 13:15:37.025359 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c8a5c73f-71a0-4fe2-8d00-aede994aabb0","Type":"ContainerStarted","Data":"27919d7942f1e22addbffb7e5058684afa405e57a2b6d8a950e7e953cb0448eb"}
Dec 10 13:15:38 crc kubenswrapper[4921]: I1210 13:15:38.037151 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c8a5c73f-71a0-4fe2-8d00-aede994aabb0","Type":"ContainerStarted","Data":"a0e031ea543d1bd63cb3975762b7e613de6eb5c5134750da8ee0981e98cd900b"}
Dec 10 13:15:38 crc kubenswrapper[4921]: I1210 13:15:38.037305 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="c8a5c73f-71a0-4fe2-8d00-aede994aabb0" containerName="ceilometer-central-agent" containerID="cri-o://04d2b0152557bff4c262c6ece2578206242c0ff9eb5e60df2b92653f72bf0ff7" gracePeriod=30
Dec 10 13:15:38 crc kubenswrapper[4921]: I1210 13:15:38.037362 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="c8a5c73f-71a0-4fe2-8d00-aede994aabb0" containerName="sg-core" containerID="cri-o://27919d7942f1e22addbffb7e5058684afa405e57a2b6d8a950e7e953cb0448eb" gracePeriod=30
Dec 10 13:15:38 crc kubenswrapper[4921]: I1210 13:15:38.037419 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="c8a5c73f-71a0-4fe2-8d00-aede994aabb0" containerName="proxy-httpd" containerID="cri-o://a0e031ea543d1bd63cb3975762b7e613de6eb5c5134750da8ee0981e98cd900b" gracePeriod=30
Dec 10 13:15:38 crc kubenswrapper[4921]: I1210 13:15:38.037434 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="c8a5c73f-71a0-4fe2-8d00-aede994aabb0" containerName="ceilometer-notification-agent" containerID="cri-o://3a6ce5364e2125cae48a3769556f1205b5289746cebcd7c28690975af82797f9" gracePeriod=30
Dec 10 13:15:38 crc kubenswrapper[4921]: I1210 13:15:38.037721 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0"
Dec 10 13:15:38 crc kubenswrapper[4921]: I1210 13:15:38.065047 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=6.244558455 podStartE2EDuration="10.065030316s" podCreationTimestamp="2025-12-10 13:15:28 +0000 UTC" firstStartedPulling="2025-12-10 13:15:33.782988797 +0000 UTC m=+1130.999210721" lastFinishedPulling="2025-12-10 13:15:37.603460658 +0000 UTC m=+1134.819682582" observedRunningTime="2025-12-10 13:15:38.061342117 +0000 UTC m=+1135.277564041" watchObservedRunningTime="2025-12-10 13:15:38.065030316 +0000 UTC m=+1135.281252240"
Dec 10 13:15:39 crc kubenswrapper[4921]: I1210 13:15:39.077951 4921 generic.go:334] "Generic (PLEG): container finished" podID="c8a5c73f-71a0-4fe2-8d00-aede994aabb0" containerID="a0e031ea543d1bd63cb3975762b7e613de6eb5c5134750da8ee0981e98cd900b" exitCode=0
Dec 10 13:15:39 crc kubenswrapper[4921]: I1210 13:15:39.078717 4921 generic.go:334] "Generic (PLEG): container finished" podID="c8a5c73f-71a0-4fe2-8d00-aede994aabb0" containerID="27919d7942f1e22addbffb7e5058684afa405e57a2b6d8a950e7e953cb0448eb" exitCode=2
Dec 10 13:15:39 crc kubenswrapper[4921]: I1210 13:15:39.078742 4921 generic.go:334] "Generic (PLEG): container finished" podID="c8a5c73f-71a0-4fe2-8d00-aede994aabb0" containerID="3a6ce5364e2125cae48a3769556f1205b5289746cebcd7c28690975af82797f9" exitCode=0
Dec 10 13:15:39 crc kubenswrapper[4921]: I1210 13:15:39.078517 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c8a5c73f-71a0-4fe2-8d00-aede994aabb0","Type":"ContainerDied","Data":"a0e031ea543d1bd63cb3975762b7e613de6eb5c5134750da8ee0981e98cd900b"}
Dec 10 13:15:39 crc kubenswrapper[4921]: I1210 13:15:39.078797 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c8a5c73f-71a0-4fe2-8d00-aede994aabb0","Type":"ContainerDied","Data":"27919d7942f1e22addbffb7e5058684afa405e57a2b6d8a950e7e953cb0448eb"}
Dec 10 13:15:39 crc kubenswrapper[4921]: I1210 13:15:39.078824 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c8a5c73f-71a0-4fe2-8d00-aede994aabb0","Type":"ContainerDied","Data":"3a6ce5364e2125cae48a3769556f1205b5289746cebcd7c28690975af82797f9"}
Dec 10 13:15:42 crc kubenswrapper[4921]: I1210 13:15:42.114453 4921 generic.go:334] "Generic (PLEG): container finished" podID="c8a5c73f-71a0-4fe2-8d00-aede994aabb0" containerID="04d2b0152557bff4c262c6ece2578206242c0ff9eb5e60df2b92653f72bf0ff7" exitCode=0
Dec 10 13:15:42 crc kubenswrapper[4921]: I1210 13:15:42.114972 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c8a5c73f-71a0-4fe2-8d00-aede994aabb0","Type":"ContainerDied","Data":"04d2b0152557bff4c262c6ece2578206242c0ff9eb5e60df2b92653f72bf0ff7"}
Dec 10 13:15:42 crc kubenswrapper[4921]: I1210 13:15:42.207473 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 10 13:15:42 crc kubenswrapper[4921]: I1210 13:15:42.391754 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c8a5c73f-71a0-4fe2-8d00-aede994aabb0-scripts\") pod \"c8a5c73f-71a0-4fe2-8d00-aede994aabb0\" (UID: \"c8a5c73f-71a0-4fe2-8d00-aede994aabb0\") "
Dec 10 13:15:42 crc kubenswrapper[4921]: I1210 13:15:42.391830 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-729mc\" (UniqueName: \"kubernetes.io/projected/c8a5c73f-71a0-4fe2-8d00-aede994aabb0-kube-api-access-729mc\") pod \"c8a5c73f-71a0-4fe2-8d00-aede994aabb0\" (UID: \"c8a5c73f-71a0-4fe2-8d00-aede994aabb0\") "
Dec 10 13:15:42 crc kubenswrapper[4921]: I1210 13:15:42.391857 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c8a5c73f-71a0-4fe2-8d00-aede994aabb0-log-httpd\") pod \"c8a5c73f-71a0-4fe2-8d00-aede994aabb0\" (UID: \"c8a5c73f-71a0-4fe2-8d00-aede994aabb0\") "
Dec 10 13:15:42 crc kubenswrapper[4921]: I1210 13:15:42.391944 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c8a5c73f-71a0-4fe2-8d00-aede994aabb0-config-data\") pod \"c8a5c73f-71a0-4fe2-8d00-aede994aabb0\" (UID: \"c8a5c73f-71a0-4fe2-8d00-aede994aabb0\") "
Dec 10 13:15:42 crc kubenswrapper[4921]: I1210 13:15:42.392707 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c8a5c73f-71a0-4fe2-8d00-aede994aabb0-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "c8a5c73f-71a0-4fe2-8d00-aede994aabb0" (UID: "c8a5c73f-71a0-4fe2-8d00-aede994aabb0"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 10 13:15:42 crc kubenswrapper[4921]: I1210 13:15:42.393188 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c8a5c73f-71a0-4fe2-8d00-aede994aabb0-run-httpd\") pod \"c8a5c73f-71a0-4fe2-8d00-aede994aabb0\" (UID: \"c8a5c73f-71a0-4fe2-8d00-aede994aabb0\") "
Dec 10 13:15:42 crc kubenswrapper[4921]: I1210 13:15:42.393604 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c8a5c73f-71a0-4fe2-8d00-aede994aabb0-sg-core-conf-yaml\") pod \"c8a5c73f-71a0-4fe2-8d00-aede994aabb0\" (UID: \"c8a5c73f-71a0-4fe2-8d00-aede994aabb0\") "
Dec 10 13:15:42 crc kubenswrapper[4921]: I1210 13:15:42.393637 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c8a5c73f-71a0-4fe2-8d00-aede994aabb0-combined-ca-bundle\") pod \"c8a5c73f-71a0-4fe2-8d00-aede994aabb0\" (UID: \"c8a5c73f-71a0-4fe2-8d00-aede994aabb0\") "
Dec 10 13:15:42 crc kubenswrapper[4921]: I1210 13:15:42.394200 4921 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c8a5c73f-71a0-4fe2-8d00-aede994aabb0-log-httpd\") on node \"crc\" DevicePath \"\""
Dec 10 13:15:42 crc kubenswrapper[4921]: I1210 13:15:42.394603 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c8a5c73f-71a0-4fe2-8d00-aede994aabb0-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "c8a5c73f-71a0-4fe2-8d00-aede994aabb0" (UID: "c8a5c73f-71a0-4fe2-8d00-aede994aabb0"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 10 13:15:42 crc kubenswrapper[4921]: I1210 13:15:42.400821 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c8a5c73f-71a0-4fe2-8d00-aede994aabb0-scripts" (OuterVolumeSpecName: "scripts") pod "c8a5c73f-71a0-4fe2-8d00-aede994aabb0" (UID: "c8a5c73f-71a0-4fe2-8d00-aede994aabb0"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 10 13:15:42 crc kubenswrapper[4921]: I1210 13:15:42.402307 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c8a5c73f-71a0-4fe2-8d00-aede994aabb0-kube-api-access-729mc" (OuterVolumeSpecName: "kube-api-access-729mc") pod "c8a5c73f-71a0-4fe2-8d00-aede994aabb0" (UID: "c8a5c73f-71a0-4fe2-8d00-aede994aabb0"). InnerVolumeSpecName "kube-api-access-729mc". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 10 13:15:42 crc kubenswrapper[4921]: I1210 13:15:42.435634 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c8a5c73f-71a0-4fe2-8d00-aede994aabb0-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "c8a5c73f-71a0-4fe2-8d00-aede994aabb0" (UID: "c8a5c73f-71a0-4fe2-8d00-aede994aabb0"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 10 13:15:42 crc kubenswrapper[4921]: I1210 13:15:42.491580 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c8a5c73f-71a0-4fe2-8d00-aede994aabb0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c8a5c73f-71a0-4fe2-8d00-aede994aabb0" (UID: "c8a5c73f-71a0-4fe2-8d00-aede994aabb0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 10 13:15:42 crc kubenswrapper[4921]: I1210 13:15:42.496031 4921 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c8a5c73f-71a0-4fe2-8d00-aede994aabb0-run-httpd\") on node \"crc\" DevicePath \"\""
Dec 10 13:15:42 crc kubenswrapper[4921]: I1210 13:15:42.496068 4921 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c8a5c73f-71a0-4fe2-8d00-aede994aabb0-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\""
Dec 10 13:15:42 crc kubenswrapper[4921]: I1210 13:15:42.496082 4921 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c8a5c73f-71a0-4fe2-8d00-aede994aabb0-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 10 13:15:42 crc kubenswrapper[4921]: I1210 13:15:42.496096 4921 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c8a5c73f-71a0-4fe2-8d00-aede994aabb0-scripts\") on node \"crc\" DevicePath \"\""
Dec 10 13:15:42 crc kubenswrapper[4921]: I1210 13:15:42.496110 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-729mc\" (UniqueName: \"kubernetes.io/projected/c8a5c73f-71a0-4fe2-8d00-aede994aabb0-kube-api-access-729mc\") on node \"crc\" DevicePath \"\""
Dec 10 13:15:42 crc kubenswrapper[4921]: I1210 13:15:42.514096 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c8a5c73f-71a0-4fe2-8d00-aede994aabb0-config-data" (OuterVolumeSpecName: "config-data") pod "c8a5c73f-71a0-4fe2-8d00-aede994aabb0" (UID: "c8a5c73f-71a0-4fe2-8d00-aede994aabb0"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 10 13:15:42 crc kubenswrapper[4921]: I1210 13:15:42.597576 4921 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c8a5c73f-71a0-4fe2-8d00-aede994aabb0-config-data\") on node \"crc\" DevicePath \"\""
Dec 10 13:15:43 crc kubenswrapper[4921]: I1210 13:15:43.127188 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c8a5c73f-71a0-4fe2-8d00-aede994aabb0","Type":"ContainerDied","Data":"fb76c6a5f2dd2306aeb1883519d89dd4ad761acb4aa194b34573798e42ed92d5"}
Dec 10 13:15:43 crc kubenswrapper[4921]: I1210 13:15:43.127244 4921 scope.go:117] "RemoveContainer" containerID="a0e031ea543d1bd63cb3975762b7e613de6eb5c5134750da8ee0981e98cd900b"
Dec 10 13:15:43 crc kubenswrapper[4921]: I1210 13:15:43.127420 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 10 13:15:43 crc kubenswrapper[4921]: I1210 13:15:43.167924 4921 scope.go:117] "RemoveContainer" containerID="27919d7942f1e22addbffb7e5058684afa405e57a2b6d8a950e7e953cb0448eb"
Dec 10 13:15:43 crc kubenswrapper[4921]: I1210 13:15:43.214200 4921 scope.go:117] "RemoveContainer" containerID="3a6ce5364e2125cae48a3769556f1205b5289746cebcd7c28690975af82797f9"
Dec 10 13:15:43 crc kubenswrapper[4921]: I1210 13:15:43.232676 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Dec 10 13:15:43 crc kubenswrapper[4921]: I1210 13:15:43.232716 4921 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"]
Dec 10 13:15:43 crc kubenswrapper[4921]: I1210 13:15:43.245632 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"]
Dec 10 13:15:43 crc kubenswrapper[4921]: E1210 13:15:43.245968 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c8a5c73f-71a0-4fe2-8d00-aede994aabb0" containerName="ceilometer-notification-agent"
Dec 10 13:15:43 crc kubenswrapper[4921]: I1210 13:15:43.245984 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="c8a5c73f-71a0-4fe2-8d00-aede994aabb0" containerName="ceilometer-notification-agent"
Dec 10 13:15:43 crc kubenswrapper[4921]: E1210 13:15:43.246010 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c8a5c73f-71a0-4fe2-8d00-aede994aabb0" containerName="ceilometer-central-agent"
Dec 10 13:15:43 crc kubenswrapper[4921]: I1210 13:15:43.246018 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="c8a5c73f-71a0-4fe2-8d00-aede994aabb0" containerName="ceilometer-central-agent"
Dec 10 13:15:43 crc kubenswrapper[4921]: E1210 13:15:43.246029 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c8a5c73f-71a0-4fe2-8d00-aede994aabb0" containerName="proxy-httpd"
Dec 10 13:15:43 crc kubenswrapper[4921]: I1210 13:15:43.246035 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="c8a5c73f-71a0-4fe2-8d00-aede994aabb0" containerName="proxy-httpd"
Dec 10 13:15:43 crc kubenswrapper[4921]: E1210 13:15:43.246040 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c8a5c73f-71a0-4fe2-8d00-aede994aabb0" containerName="sg-core"
Dec 10 13:15:43 crc kubenswrapper[4921]: I1210 13:15:43.246046 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="c8a5c73f-71a0-4fe2-8d00-aede994aabb0" containerName="sg-core"
Dec 10 13:15:43 crc kubenswrapper[4921]: I1210 13:15:43.246195 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="c8a5c73f-71a0-4fe2-8d00-aede994aabb0" containerName="proxy-httpd"
Dec 10 13:15:43 crc kubenswrapper[4921]: I1210 13:15:43.246212 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="c8a5c73f-71a0-4fe2-8d00-aede994aabb0" containerName="sg-core"
Dec 10 13:15:43 crc kubenswrapper[4921]: I1210 13:15:43.246222 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="c8a5c73f-71a0-4fe2-8d00-aede994aabb0" containerName="ceilometer-notification-agent"
Dec 10 13:15:43 crc kubenswrapper[4921]: I1210 13:15:43.246230 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="c8a5c73f-71a0-4fe2-8d00-aede994aabb0" containerName="ceilometer-central-agent"
Dec 10 13:15:43 crc kubenswrapper[4921]: I1210 13:15:43.247591 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 10 13:15:43 crc kubenswrapper[4921]: I1210 13:15:43.248459 4921 scope.go:117] "RemoveContainer" containerID="04d2b0152557bff4c262c6ece2578206242c0ff9eb5e60df2b92653f72bf0ff7"
Dec 10 13:15:43 crc kubenswrapper[4921]: I1210 13:15:43.255424 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Dec 10 13:15:43 crc kubenswrapper[4921]: I1210 13:15:43.261973 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts"
Dec 10 13:15:43 crc kubenswrapper[4921]: I1210 13:15:43.262151 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Dec 10 13:15:43 crc kubenswrapper[4921]: I1210 13:15:43.311145 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0eea7b39-fcbc-416a-8f38-5bd99ba10a03-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"0eea7b39-fcbc-416a-8f38-5bd99ba10a03\") " pod="openstack/ceilometer-0"
Dec 10 13:15:43 crc kubenswrapper[4921]: I1210 13:15:43.311311 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0eea7b39-fcbc-416a-8f38-5bd99ba10a03-log-httpd\") pod \"ceilometer-0\" (UID: \"0eea7b39-fcbc-416a-8f38-5bd99ba10a03\") " pod="openstack/ceilometer-0"
Dec 10 13:15:43 crc kubenswrapper[4921]: I1210 13:15:43.311413 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0eea7b39-fcbc-416a-8f38-5bd99ba10a03-run-httpd\") pod \"ceilometer-0\" (UID: \"0eea7b39-fcbc-416a-8f38-5bd99ba10a03\") " pod="openstack/ceilometer-0"
Dec 10 13:15:43 crc kubenswrapper[4921]: I1210 13:15:43.311643 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0eea7b39-fcbc-416a-8f38-5bd99ba10a03-scripts\") pod \"ceilometer-0\" (UID: \"0eea7b39-fcbc-416a-8f38-5bd99ba10a03\") " pod="openstack/ceilometer-0"
Dec 10 13:15:43 crc kubenswrapper[4921]: I1210 13:15:43.311713 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-594x6\" (UniqueName: \"kubernetes.io/projected/0eea7b39-fcbc-416a-8f38-5bd99ba10a03-kube-api-access-594x6\") pod \"ceilometer-0\" (UID: \"0eea7b39-fcbc-416a-8f38-5bd99ba10a03\") " pod="openstack/ceilometer-0"
Dec 10 13:15:43 crc kubenswrapper[4921]: I1210 13:15:43.311781 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0eea7b39-fcbc-416a-8f38-5bd99ba10a03-config-data\") pod \"ceilometer-0\" (UID: \"0eea7b39-fcbc-416a-8f38-5bd99ba10a03\") " pod="openstack/ceilometer-0"
Dec 10 13:15:43 crc kubenswrapper[4921]: I1210 13:15:43.311805 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0eea7b39-fcbc-416a-8f38-5bd99ba10a03-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"0eea7b39-fcbc-416a-8f38-5bd99ba10a03\") " pod="openstack/ceilometer-0"
Dec 10 13:15:43 crc kubenswrapper[4921]: I1210 13:15:43.413339 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0eea7b39-fcbc-416a-8f38-5bd99ba10a03-scripts\") pod \"ceilometer-0\" (UID: \"0eea7b39-fcbc-416a-8f38-5bd99ba10a03\") " pod="openstack/ceilometer-0"
Dec 10 13:15:43 crc kubenswrapper[4921]: I1210 13:15:43.413417 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-594x6\" (UniqueName: \"kubernetes.io/projected/0eea7b39-fcbc-416a-8f38-5bd99ba10a03-kube-api-access-594x6\") pod \"ceilometer-0\" (UID: \"0eea7b39-fcbc-416a-8f38-5bd99ba10a03\") " pod="openstack/ceilometer-0"
Dec 10 13:15:43 crc kubenswrapper[4921]: I1210 13:15:43.413462 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0eea7b39-fcbc-416a-8f38-5bd99ba10a03-config-data\") pod \"ceilometer-0\" (UID: \"0eea7b39-fcbc-416a-8f38-5bd99ba10a03\") " pod="openstack/ceilometer-0"
Dec 10 13:15:43 crc kubenswrapper[4921]: I1210 13:15:43.413478 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0eea7b39-fcbc-416a-8f38-5bd99ba10a03-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"0eea7b39-fcbc-416a-8f38-5bd99ba10a03\") " pod="openstack/ceilometer-0"
Dec 10 13:15:43 crc kubenswrapper[4921]: I1210 13:15:43.413501 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0eea7b39-fcbc-416a-8f38-5bd99ba10a03-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"0eea7b39-fcbc-416a-8f38-5bd99ba10a03\") " pod="openstack/ceilometer-0"
Dec 10 13:15:43 crc kubenswrapper[4921]: I1210 13:15:43.413546 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0eea7b39-fcbc-416a-8f38-5bd99ba10a03-log-httpd\") pod \"ceilometer-0\" (UID: \"0eea7b39-fcbc-416a-8f38-5bd99ba10a03\") " pod="openstack/ceilometer-0"
Dec 10 13:15:43 crc kubenswrapper[4921]: I1210 13:15:43.413909 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0eea7b39-fcbc-416a-8f38-5bd99ba10a03-run-httpd\") pod \"ceilometer-0\" (UID: \"0eea7b39-fcbc-416a-8f38-5bd99ba10a03\") " pod="openstack/ceilometer-0"
Dec 10 13:15:43 crc kubenswrapper[4921]: I1210 13:15:43.414430 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0eea7b39-fcbc-416a-8f38-5bd99ba10a03-run-httpd\") pod \"ceilometer-0\" (UID: \"0eea7b39-fcbc-416a-8f38-5bd99ba10a03\") " pod="openstack/ceilometer-0"
Dec 10 13:15:43 crc kubenswrapper[4921]: I1210 13:15:43.414465 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0eea7b39-fcbc-416a-8f38-5bd99ba10a03-log-httpd\") pod \"ceilometer-0\" (UID: \"0eea7b39-fcbc-416a-8f38-5bd99ba10a03\") " pod="openstack/ceilometer-0"
Dec 10 13:15:43 crc kubenswrapper[4921]: I1210 13:15:43.417027 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0eea7b39-fcbc-416a-8f38-5bd99ba10a03-scripts\") pod \"ceilometer-0\" (UID: \"0eea7b39-fcbc-416a-8f38-5bd99ba10a03\") " pod="openstack/ceilometer-0"
Dec 10 13:15:43 crc kubenswrapper[4921]: I1210 13:15:43.417960 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0eea7b39-fcbc-416a-8f38-5bd99ba10a03-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"0eea7b39-fcbc-416a-8f38-5bd99ba10a03\") " pod="openstack/ceilometer-0"
Dec 10 13:15:43 crc kubenswrapper[4921]: I1210 13:15:43.419258 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0eea7b39-fcbc-416a-8f38-5bd99ba10a03-config-data\") pod \"ceilometer-0\" (UID: \"0eea7b39-fcbc-416a-8f38-5bd99ba10a03\") " pod="openstack/ceilometer-0"
Dec 10 13:15:43 crc kubenswrapper[4921]: I1210 13:15:43.426237 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0eea7b39-fcbc-416a-8f38-5bd99ba10a03-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"0eea7b39-fcbc-416a-8f38-5bd99ba10a03\") " pod="openstack/ceilometer-0"
Dec 10 13:15:43 crc kubenswrapper[4921]: I1210 13:15:43.436350 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-594x6\" (UniqueName: \"kubernetes.io/projected/0eea7b39-fcbc-416a-8f38-5bd99ba10a03-kube-api-access-594x6\") pod \"ceilometer-0\" (UID: \"0eea7b39-fcbc-416a-8f38-5bd99ba10a03\") " pod="openstack/ceilometer-0"
Dec 10 13:15:43 crc kubenswrapper[4921]: I1210 13:15:43.590431 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 10 13:15:44 crc kubenswrapper[4921]: I1210 13:15:44.038991 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Dec 10 13:15:44 crc kubenswrapper[4921]: I1210 13:15:44.140671 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0eea7b39-fcbc-416a-8f38-5bd99ba10a03","Type":"ContainerStarted","Data":"64d30dd77afac4a57129603cada4ac74c859a4f6e72dce3c54f66c15b0b0b337"}
Dec 10 13:15:45 crc kubenswrapper[4921]: I1210 13:15:45.150798 4921 generic.go:334] "Generic (PLEG): container finished" podID="54815de8-8713-4fac-b91a-3ee4ecb8a068" containerID="5f3a96948da2e8010c78c048ec4bbc5f1566e6effc5adcd18ccbd75b69386c1f" exitCode=0
Dec 10 13:15:45 crc kubenswrapper[4921]: I1210 13:15:45.150879 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-ftfd5" event={"ID":"54815de8-8713-4fac-b91a-3ee4ecb8a068","Type":"ContainerDied","Data":"5f3a96948da2e8010c78c048ec4bbc5f1566e6effc5adcd18ccbd75b69386c1f"}
Dec 10 13:15:45 crc kubenswrapper[4921]: I1210 13:15:45.152885 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0eea7b39-fcbc-416a-8f38-5bd99ba10a03","Type":"ContainerStarted","Data":"9b603613014ee6cee9be58a2770ef2ff69165aa482e2cc58c7c4aa9fa2c97330"}
Dec 10 13:15:45 crc kubenswrapper[4921]: I1210 13:15:45.202372 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c8a5c73f-71a0-4fe2-8d00-aede994aabb0" path="/var/lib/kubelet/pods/c8a5c73f-71a0-4fe2-8d00-aede994aabb0/volumes"
Dec 10 13:15:46 crc kubenswrapper[4921]: I1210 13:15:46.711255 4921 patch_prober.go:28] interesting pod/machine-config-daemon-vn2n6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 10 13:15:46 crc kubenswrapper[4921]: I1210 13:15:46.711604 4921 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" podUID="354355f7-6630-49a8-bdc5-5e875feecb7f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 10 13:15:46 crc kubenswrapper[4921]: I1210 13:15:46.711650 4921 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6"
Dec 10 13:15:46 crc kubenswrapper[4921]: I1210 13:15:46.712359 4921 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"4971420ec666b0633f92ea9dfa8a109dd0d7730fedda43f31a5af62a9d620d9b"} pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 10 13:15:46 crc kubenswrapper[4921]: I1210 13:15:46.712488 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" podUID="354355f7-6630-49a8-bdc5-5e875feecb7f" containerName="machine-config-daemon" containerID="cri-o://4971420ec666b0633f92ea9dfa8a109dd0d7730fedda43f31a5af62a9d620d9b" gracePeriod=600
Dec 10 13:15:47 crc kubenswrapper[4921]: I1210 13:15:47.005499 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-ftfd5"
Dec 10 13:15:47 crc kubenswrapper[4921]: I1210 13:15:47.174982 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-ftfd5"
Dec 10 13:15:47 crc kubenswrapper[4921]: I1210 13:15:47.175287 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-ftfd5" event={"ID":"54815de8-8713-4fac-b91a-3ee4ecb8a068","Type":"ContainerDied","Data":"3852d8174c06426de77772712416c187b7ac124020f73f356f05321e3c459226"}
Dec 10 13:15:47 crc kubenswrapper[4921]: I1210 13:15:47.176233 4921 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3852d8174c06426de77772712416c187b7ac124020f73f356f05321e3c459226"
Dec 10 13:15:47 crc kubenswrapper[4921]: I1210 13:15:47.176703 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/54815de8-8713-4fac-b91a-3ee4ecb8a068-scripts\") pod \"54815de8-8713-4fac-b91a-3ee4ecb8a068\" (UID: \"54815de8-8713-4fac-b91a-3ee4ecb8a068\") "
Dec 10 13:15:47 crc kubenswrapper[4921]: I1210 13:15:47.178043 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/54815de8-8713-4fac-b91a-3ee4ecb8a068-config-data\") pod \"54815de8-8713-4fac-b91a-3ee4ecb8a068\" (UID: \"54815de8-8713-4fac-b91a-3ee4ecb8a068\") "
Dec 10 13:15:47 crc kubenswrapper[4921]: I1210 13:15:47.178091 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54815de8-8713-4fac-b91a-3ee4ecb8a068-combined-ca-bundle\") pod \"54815de8-8713-4fac-b91a-3ee4ecb8a068\" (UID: \"54815de8-8713-4fac-b91a-3ee4ecb8a068\") "
Dec 10 13:15:47 crc kubenswrapper[4921]: I1210 13:15:47.178128 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nft4r\" (UniqueName: \"kubernetes.io/projected/54815de8-8713-4fac-b91a-3ee4ecb8a068-kube-api-access-nft4r\") pod \"54815de8-8713-4fac-b91a-3ee4ecb8a068\" (UID: \"54815de8-8713-4fac-b91a-3ee4ecb8a068\") "
Dec 10 13:15:47 crc kubenswrapper[4921]: I1210 13:15:47.182608 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0eea7b39-fcbc-416a-8f38-5bd99ba10a03","Type":"ContainerStarted","Data":"437c15cc494b955bbc4d356c7e9d9812024237cbf10692e5f7e494e9807a10c6"}
Dec 10 13:15:47 crc kubenswrapper[4921]: I1210 13:15:47.186990 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/54815de8-8713-4fac-b91a-3ee4ecb8a068-scripts" (OuterVolumeSpecName: "scripts") pod "54815de8-8713-4fac-b91a-3ee4ecb8a068" (UID: "54815de8-8713-4fac-b91a-3ee4ecb8a068"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 10 13:15:47 crc kubenswrapper[4921]: I1210 13:15:47.191146 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/54815de8-8713-4fac-b91a-3ee4ecb8a068-kube-api-access-nft4r" (OuterVolumeSpecName: "kube-api-access-nft4r") pod "54815de8-8713-4fac-b91a-3ee4ecb8a068" (UID: "54815de8-8713-4fac-b91a-3ee4ecb8a068"). InnerVolumeSpecName "kube-api-access-nft4r". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 10 13:15:47 crc kubenswrapper[4921]: I1210 13:15:47.198843 4921 generic.go:334] "Generic (PLEG): container finished" podID="354355f7-6630-49a8-bdc5-5e875feecb7f" containerID="4971420ec666b0633f92ea9dfa8a109dd0d7730fedda43f31a5af62a9d620d9b" exitCode=0
Dec 10 13:15:47 crc kubenswrapper[4921]: I1210 13:15:47.198890 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" event={"ID":"354355f7-6630-49a8-bdc5-5e875feecb7f","Type":"ContainerDied","Data":"4971420ec666b0633f92ea9dfa8a109dd0d7730fedda43f31a5af62a9d620d9b"}
Dec 10 13:15:47 crc kubenswrapper[4921]: I1210 13:15:47.198937 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" event={"ID":"354355f7-6630-49a8-bdc5-5e875feecb7f","Type":"ContainerStarted","Data":"dded596c5a06c9f34bbe49927b6ba36b53f24e08c9e890c58e9f5dbee945fc5f"}
Dec 10 13:15:47 crc kubenswrapper[4921]: I1210 13:15:47.198955 4921 scope.go:117] "RemoveContainer" containerID="8d11980c56c7b436b6d741535cc469b576e206b3de67a362d6c36f2e03055365"
Dec 10 13:15:47 crc kubenswrapper[4921]: I1210 13:15:47.243936 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/54815de8-8713-4fac-b91a-3ee4ecb8a068-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "54815de8-8713-4fac-b91a-3ee4ecb8a068" (UID: "54815de8-8713-4fac-b91a-3ee4ecb8a068"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 10 13:15:47 crc kubenswrapper[4921]: I1210 13:15:47.280893 4921 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54815de8-8713-4fac-b91a-3ee4ecb8a068-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 10 13:15:47 crc kubenswrapper[4921]: I1210 13:15:47.280931 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nft4r\" (UniqueName: \"kubernetes.io/projected/54815de8-8713-4fac-b91a-3ee4ecb8a068-kube-api-access-nft4r\") on node \"crc\" DevicePath \"\""
Dec 10 13:15:47 crc kubenswrapper[4921]: I1210 13:15:47.280946 4921 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/54815de8-8713-4fac-b91a-3ee4ecb8a068-scripts\") on node \"crc\" DevicePath \"\""
Dec 10 13:15:47 crc kubenswrapper[4921]: I1210 13:15:47.296545 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/54815de8-8713-4fac-b91a-3ee4ecb8a068-config-data" (OuterVolumeSpecName: "config-data") pod "54815de8-8713-4fac-b91a-3ee4ecb8a068" (UID: "54815de8-8713-4fac-b91a-3ee4ecb8a068"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 10 13:15:47 crc kubenswrapper[4921]: I1210 13:15:47.301829 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"]
Dec 10 13:15:47 crc kubenswrapper[4921]: E1210 13:15:47.302232 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54815de8-8713-4fac-b91a-3ee4ecb8a068" containerName="nova-cell0-conductor-db-sync"
Dec 10 13:15:47 crc kubenswrapper[4921]: I1210 13:15:47.302254 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="54815de8-8713-4fac-b91a-3ee4ecb8a068" containerName="nova-cell0-conductor-db-sync"
Dec 10 13:15:47 crc kubenswrapper[4921]: I1210 13:15:47.302572 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="54815de8-8713-4fac-b91a-3ee4ecb8a068" containerName="nova-cell0-conductor-db-sync"
Dec 10 13:15:47 crc kubenswrapper[4921]: I1210 13:15:47.303984 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0"
Dec 10 13:15:47 crc kubenswrapper[4921]: I1210 13:15:47.340597 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"]
Dec 10 13:15:47 crc kubenswrapper[4921]: I1210 13:15:47.382640 4921 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/54815de8-8713-4fac-b91a-3ee4ecb8a068-config-data\") on node \"crc\" DevicePath \"\""
Dec 10 13:15:47 crc kubenswrapper[4921]: I1210 13:15:47.484114 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b24hk\" (UniqueName: \"kubernetes.io/projected/c3e41b45-0540-41a8-9b47-4b86ffd26dc1-kube-api-access-b24hk\") pod \"nova-cell0-conductor-0\" (UID: \"c3e41b45-0540-41a8-9b47-4b86ffd26dc1\") " pod="openstack/nova-cell0-conductor-0"
Dec 10 13:15:47 crc kubenswrapper[4921]: I1210 13:15:47.484217 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c3e41b45-0540-41a8-9b47-4b86ffd26dc1-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"c3e41b45-0540-41a8-9b47-4b86ffd26dc1\") " pod="openstack/nova-cell0-conductor-0"
Dec 10 13:15:47 crc kubenswrapper[4921]: I1210 13:15:47.484247 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c3e41b45-0540-41a8-9b47-4b86ffd26dc1-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"c3e41b45-0540-41a8-9b47-4b86ffd26dc1\") " pod="openstack/nova-cell0-conductor-0"
Dec 10 13:15:47 crc kubenswrapper[4921]: I1210 13:15:47.585916 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c3e41b45-0540-41a8-9b47-4b86ffd26dc1-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"c3e41b45-0540-41a8-9b47-4b86ffd26dc1\") " pod="openstack/nova-cell0-conductor-0"
Dec 10 13:15:47 crc kubenswrapper[4921]: I1210 13:15:47.586033 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b24hk\" (UniqueName: \"kubernetes.io/projected/c3e41b45-0540-41a8-9b47-4b86ffd26dc1-kube-api-access-b24hk\") pod \"nova-cell0-conductor-0\" (UID: \"c3e41b45-0540-41a8-9b47-4b86ffd26dc1\") " pod="openstack/nova-cell0-conductor-0"
Dec 10 13:15:47 crc kubenswrapper[4921]: I1210 13:15:47.586100 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c3e41b45-0540-41a8-9b47-4b86ffd26dc1-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"c3e41b45-0540-41a8-9b47-4b86ffd26dc1\") " pod="openstack/nova-cell0-conductor-0"
Dec 10 13:15:47 crc kubenswrapper[4921]: I1210 13:15:47.590144 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c3e41b45-0540-41a8-9b47-4b86ffd26dc1-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"c3e41b45-0540-41a8-9b47-4b86ffd26dc1\") " pod="openstack/nova-cell0-conductor-0"
Dec 10 13:15:47 crc kubenswrapper[4921]: I1210 13:15:47.590919 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c3e41b45-0540-41a8-9b47-4b86ffd26dc1-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"c3e41b45-0540-41a8-9b47-4b86ffd26dc1\") " pod="openstack/nova-cell0-conductor-0"
Dec 10 13:15:47 crc kubenswrapper[4921]: I1210 13:15:47.604483 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b24hk\" (UniqueName: \"kubernetes.io/projected/c3e41b45-0540-41a8-9b47-4b86ffd26dc1-kube-api-access-b24hk\") pod \"nova-cell0-conductor-0\" (UID: \"c3e41b45-0540-41a8-9b47-4b86ffd26dc1\") " pod="openstack/nova-cell0-conductor-0"
Dec 10 13:15:47 crc kubenswrapper[4921]: I1210 13:15:47.643297 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0"
Dec 10 13:15:48 crc kubenswrapper[4921]: W1210 13:15:48.161663 4921 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc3e41b45_0540_41a8_9b47_4b86ffd26dc1.slice/crio-596566ed026e2b778b8788b4cf563e9b98f911d07d635709bd5437f515ebc777 WatchSource:0}: Error finding container 596566ed026e2b778b8788b4cf563e9b98f911d07d635709bd5437f515ebc777: Status 404 returned error can't find the container with id 596566ed026e2b778b8788b4cf563e9b98f911d07d635709bd5437f515ebc777
Dec 10 13:15:48 crc kubenswrapper[4921]: I1210 13:15:48.167953 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"]
Dec 10 13:15:48 crc kubenswrapper[4921]: I1210 13:15:48.211725 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"c3e41b45-0540-41a8-9b47-4b86ffd26dc1","Type":"ContainerStarted","Data":"596566ed026e2b778b8788b4cf563e9b98f911d07d635709bd5437f515ebc777"}
Dec 10 13:15:48 crc kubenswrapper[4921]: I1210 13:15:48.228250 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0eea7b39-fcbc-416a-8f38-5bd99ba10a03","Type":"ContainerStarted","Data":"87e8bc55085d37341914a7cdcf0df8d269ee407ff60b627d01d842733942a368"}
Dec 10 13:15:49 crc kubenswrapper[4921]: I1210 13:15:49.270164 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"c3e41b45-0540-41a8-9b47-4b86ffd26dc1","Type":"ContainerStarted","Data":"ebaad70f7dfae87beb3a45848d9e9b1b76e599cf763547fea6d7dcd1de64f8b4"}
Dec 10 13:15:49 crc kubenswrapper[4921]: I1210 13:15:49.281488 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0eea7b39-fcbc-416a-8f38-5bd99ba10a03","Type":"ContainerStarted","Data":"64043fd9733e7fd1dbfc87a9c31cc917fa4ebcfed7a23593e131aefea422692b"}
Dec 10 13:15:49 crc kubenswrapper[4921]: I1210 13:15:49.281533 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0"
Dec 10 13:15:49 crc kubenswrapper[4921]: I1210 13:15:49.281548 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0"
Dec 10 13:15:49 crc kubenswrapper[4921]: I1210 13:15:49.309636 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.309609321 podStartE2EDuration="2.309609321s" podCreationTimestamp="2025-12-10 13:15:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 13:15:49.287632571 +0000 UTC m=+1146.503854495" watchObservedRunningTime="2025-12-10 13:15:49.309609321 +0000 UTC m=+1146.525831245"
Dec 10 13:15:49 crc kubenswrapper[4921]: I1210 13:15:49.325523 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.9766589570000002 podStartE2EDuration="6.325493737s" podCreationTimestamp="2025-12-10 13:15:43 +0000 UTC" firstStartedPulling="2025-12-10 13:15:44.047610844 +0000 UTC m=+1141.263832768" lastFinishedPulling="2025-12-10 13:15:48.396445624 +0000 UTC m=+1145.612667548" observedRunningTime="2025-12-10 13:15:49.312237532 +0000 UTC m=+1146.528459466" watchObservedRunningTime="2025-12-10 13:15:49.325493737 +0000 UTC m=+1146.541715661"
Dec 10 13:15:57 crc kubenswrapper[4921]: I1210 13:15:57.667906 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0"
Dec 10 13:15:58 crc kubenswrapper[4921]: I1210 13:15:58.203910 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-69vnb"]
Dec 10 13:15:58 crc kubenswrapper[4921]: I1210 13:15:58.205547 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-69vnb"
Dec 10 13:15:58 crc kubenswrapper[4921]: I1210 13:15:58.208090 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data"
Dec 10 13:15:58 crc kubenswrapper[4921]: I1210 13:15:58.209907 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts"
Dec 10 13:15:58 crc kubenswrapper[4921]: I1210 13:15:58.226832 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-69vnb"]
Dec 10 13:15:58 crc kubenswrapper[4921]: I1210 13:15:58.292061 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f5951c8-dfbf-4a11-aeb8-f95531cafe8c-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-69vnb\" (UID: \"8f5951c8-dfbf-4a11-aeb8-f95531cafe8c\") " pod="openstack/nova-cell0-cell-mapping-69vnb"
Dec 10 13:15:58 crc kubenswrapper[4921]: I1210 13:15:58.292346 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8f5951c8-dfbf-4a11-aeb8-f95531cafe8c-config-data\") pod \"nova-cell0-cell-mapping-69vnb\" (UID: \"8f5951c8-dfbf-4a11-aeb8-f95531cafe8c\") " pod="openstack/nova-cell0-cell-mapping-69vnb"
Dec 10 13:15:58 crc kubenswrapper[4921]: I1210 13:15:58.292463 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8f5951c8-dfbf-4a11-aeb8-f95531cafe8c-scripts\") pod \"nova-cell0-cell-mapping-69vnb\" (UID: \"8f5951c8-dfbf-4a11-aeb8-f95531cafe8c\") " pod="openstack/nova-cell0-cell-mapping-69vnb"
Dec 10 13:15:58 crc kubenswrapper[4921]: I1210 13:15:58.292591 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q2bxk\" (UniqueName: \"kubernetes.io/projected/8f5951c8-dfbf-4a11-aeb8-f95531cafe8c-kube-api-access-q2bxk\") pod \"nova-cell0-cell-mapping-69vnb\" (UID: \"8f5951c8-dfbf-4a11-aeb8-f95531cafe8c\") " pod="openstack/nova-cell0-cell-mapping-69vnb"
Dec 10 13:15:58 crc kubenswrapper[4921]: I1210 13:15:58.393654 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8f5951c8-dfbf-4a11-aeb8-f95531cafe8c-config-data\") pod \"nova-cell0-cell-mapping-69vnb\" (UID: \"8f5951c8-dfbf-4a11-aeb8-f95531cafe8c\") " pod="openstack/nova-cell0-cell-mapping-69vnb"
Dec 10 13:15:58 crc kubenswrapper[4921]: I1210 13:15:58.393951 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8f5951c8-dfbf-4a11-aeb8-f95531cafe8c-scripts\") pod \"nova-cell0-cell-mapping-69vnb\" (UID: \"8f5951c8-dfbf-4a11-aeb8-f95531cafe8c\") " pod="openstack/nova-cell0-cell-mapping-69vnb"
Dec 10 13:15:58 crc kubenswrapper[4921]: I1210 13:15:58.394066 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q2bxk\" (UniqueName: \"kubernetes.io/projected/8f5951c8-dfbf-4a11-aeb8-f95531cafe8c-kube-api-access-q2bxk\") pod \"nova-cell0-cell-mapping-69vnb\" (UID: \"8f5951c8-dfbf-4a11-aeb8-f95531cafe8c\") " pod="openstack/nova-cell0-cell-mapping-69vnb"
Dec 10 13:15:58 crc kubenswrapper[4921]: I1210 13:15:58.394192 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f5951c8-dfbf-4a11-aeb8-f95531cafe8c-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-69vnb\" (UID: \"8f5951c8-dfbf-4a11-aeb8-f95531cafe8c\") " pod="openstack/nova-cell0-cell-mapping-69vnb"
Dec 10 13:15:58 crc kubenswrapper[4921]: I1210 13:15:58.404456 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f5951c8-dfbf-4a11-aeb8-f95531cafe8c-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-69vnb\" (UID: \"8f5951c8-dfbf-4a11-aeb8-f95531cafe8c\") " pod="openstack/nova-cell0-cell-mapping-69vnb"
Dec 10 13:15:58 crc kubenswrapper[4921]: I1210 13:15:58.406600 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8f5951c8-dfbf-4a11-aeb8-f95531cafe8c-config-data\") pod \"nova-cell0-cell-mapping-69vnb\" (UID: \"8f5951c8-dfbf-4a11-aeb8-f95531cafe8c\") " pod="openstack/nova-cell0-cell-mapping-69vnb"
Dec 10 13:15:58 crc kubenswrapper[4921]: I1210 13:15:58.427619 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8f5951c8-dfbf-4a11-aeb8-f95531cafe8c-scripts\") pod \"nova-cell0-cell-mapping-69vnb\" (UID: \"8f5951c8-dfbf-4a11-aeb8-f95531cafe8c\") " pod="openstack/nova-cell0-cell-mapping-69vnb"
Dec 10 13:15:58 crc kubenswrapper[4921]: I1210 13:15:58.429969 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q2bxk\" (UniqueName: \"kubernetes.io/projected/8f5951c8-dfbf-4a11-aeb8-f95531cafe8c-kube-api-access-q2bxk\") pod \"nova-cell0-cell-mapping-69vnb\" (UID: \"8f5951c8-dfbf-4a11-aeb8-f95531cafe8c\") " pod="openstack/nova-cell0-cell-mapping-69vnb"
Dec 10 13:15:58 crc kubenswrapper[4921]: I1210 13:15:58.474876 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"]
Dec 10 13:15:58 crc kubenswrapper[4921]: I1210 13:15:58.476342 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Dec 10 13:15:58 crc kubenswrapper[4921]: I1210 13:15:58.505592 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"]
Dec 10 13:15:58 crc kubenswrapper[4921]: I1210 13:15:58.507118 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Dec 10 13:15:58 crc kubenswrapper[4921]: I1210 13:15:58.535555 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data"
Dec 10 13:15:58 crc kubenswrapper[4921]: I1210 13:15:58.540331 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data"
Dec 10 13:15:58 crc kubenswrapper[4921]: I1210 13:15:58.554455 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Dec 10 13:15:58 crc kubenswrapper[4921]: I1210 13:15:58.563347 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Dec 10 13:15:58 crc kubenswrapper[4921]: I1210 13:15:58.587941 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-69vnb"
Dec 10 13:15:58 crc kubenswrapper[4921]: I1210 13:15:58.598423 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cgtxb\" (UniqueName: \"kubernetes.io/projected/ab217767-b6c4-4041-a7a6-54d8f8f9c2fd-kube-api-access-cgtxb\") pod \"nova-metadata-0\" (UID: \"ab217767-b6c4-4041-a7a6-54d8f8f9c2fd\") " pod="openstack/nova-metadata-0"
Dec 10 13:15:58 crc kubenswrapper[4921]: I1210 13:15:58.598461 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-99rpl\" (UniqueName: \"kubernetes.io/projected/b4f67c57-1639-4769-bdc2-22991d1a145d-kube-api-access-99rpl\") pod \"nova-api-0\" (UID: \"b4f67c57-1639-4769-bdc2-22991d1a145d\") " pod="openstack/nova-api-0"
Dec 10 13:15:58 crc kubenswrapper[4921]: I1210 13:15:58.598488 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b4f67c57-1639-4769-bdc2-22991d1a145d-config-data\") pod \"nova-api-0\" (UID: \"b4f67c57-1639-4769-bdc2-22991d1a145d\") " pod="openstack/nova-api-0"
Dec 10 13:15:58 crc kubenswrapper[4921]: I1210 13:15:58.598507 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab217767-b6c4-4041-a7a6-54d8f8f9c2fd-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"ab217767-b6c4-4041-a7a6-54d8f8f9c2fd\") " pod="openstack/nova-metadata-0"
Dec 10 13:15:58 crc kubenswrapper[4921]: I1210 13:15:58.598550 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4f67c57-1639-4769-bdc2-22991d1a145d-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"b4f67c57-1639-4769-bdc2-22991d1a145d\") " pod="openstack/nova-api-0"
Dec 10 13:15:58 crc kubenswrapper[4921]: I1210 13:15:58.598601 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ab217767-b6c4-4041-a7a6-54d8f8f9c2fd-config-data\") pod \"nova-metadata-0\" (UID: \"ab217767-b6c4-4041-a7a6-54d8f8f9c2fd\") " pod="openstack/nova-metadata-0"
Dec 10 13:15:58 crc kubenswrapper[4921]: I1210 13:15:58.598616 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ab217767-b6c4-4041-a7a6-54d8f8f9c2fd-logs\") pod \"nova-metadata-0\" (UID: \"ab217767-b6c4-4041-a7a6-54d8f8f9c2fd\") " pod="openstack/nova-metadata-0"
Dec 10 13:15:58 crc kubenswrapper[4921]: I1210 13:15:58.598639 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b4f67c57-1639-4769-bdc2-22991d1a145d-logs\") pod \"nova-api-0\" (UID: \"b4f67c57-1639-4769-bdc2-22991d1a145d\") " pod="openstack/nova-api-0"
Dec 10 13:15:58 crc kubenswrapper[4921]: I1210 13:15:58.632544 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"]
Dec 10 13:15:58 crc kubenswrapper[4921]: I1210 13:15:58.633937 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Dec 10 13:15:58 crc kubenswrapper[4921]: I1210 13:15:58.639293 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data"
Dec 10 13:15:58 crc kubenswrapper[4921]: I1210 13:15:58.678637 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Dec 10 13:15:58 crc kubenswrapper[4921]: I1210 13:15:58.684932 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0"
Dec 10 13:15:58 crc kubenswrapper[4921]: I1210 13:15:58.690681 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data"
Dec 10 13:15:58 crc kubenswrapper[4921]: I1210 13:15:58.691812 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Dec 10 13:15:58 crc kubenswrapper[4921]: I1210 13:15:58.703072 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4f67c57-1639-4769-bdc2-22991d1a145d-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"b4f67c57-1639-4769-bdc2-22991d1a145d\") " pod="openstack/nova-api-0"
Dec 10 13:15:58 crc kubenswrapper[4921]: I1210 13:15:58.703151 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ab217767-b6c4-4041-a7a6-54d8f8f9c2fd-config-data\") pod \"nova-metadata-0\" (UID: \"ab217767-b6c4-4041-a7a6-54d8f8f9c2fd\") " pod="openstack/nova-metadata-0"
Dec 10 13:15:58 crc kubenswrapper[4921]: I1210 13:15:58.703173 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ab217767-b6c4-4041-a7a6-54d8f8f9c2fd-logs\") pod \"nova-metadata-0\" (UID: \"ab217767-b6c4-4041-a7a6-54d8f8f9c2fd\") " pod="openstack/nova-metadata-0"
Dec 10 13:15:58 crc kubenswrapper[4921]: I1210 13:15:58.703199 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b4f67c57-1639-4769-bdc2-22991d1a145d-logs\") pod \"nova-api-0\" (UID: \"b4f67c57-1639-4769-bdc2-22991d1a145d\") " pod="openstack/nova-api-0"
Dec 10 13:15:58 crc kubenswrapper[4921]: I1210 13:15:58.703248 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cgtxb\" (UniqueName: \"kubernetes.io/projected/ab217767-b6c4-4041-a7a6-54d8f8f9c2fd-kube-api-access-cgtxb\") pod \"nova-metadata-0\" (UID: \"ab217767-b6c4-4041-a7a6-54d8f8f9c2fd\") " pod="openstack/nova-metadata-0"
Dec 10 13:15:58 crc kubenswrapper[4921]: I1210 13:15:58.703263 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-99rpl\" (UniqueName: \"kubernetes.io/projected/b4f67c57-1639-4769-bdc2-22991d1a145d-kube-api-access-99rpl\") pod \"nova-api-0\" (UID: \"b4f67c57-1639-4769-bdc2-22991d1a145d\") " pod="openstack/nova-api-0"
Dec 10 13:15:58 crc kubenswrapper[4921]: I1210 13:15:58.703285 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b4f67c57-1639-4769-bdc2-22991d1a145d-config-data\") pod \"nova-api-0\" (UID: \"b4f67c57-1639-4769-bdc2-22991d1a145d\") " pod="openstack/nova-api-0"
Dec 10 13:15:58 crc kubenswrapper[4921]: I1210 13:15:58.703306 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab217767-b6c4-4041-a7a6-54d8f8f9c2fd-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"ab217767-b6c4-4041-a7a6-54d8f8f9c2fd\") " pod="openstack/nova-metadata-0"
Dec 10 13:15:58 crc kubenswrapper[4921]: I1210 13:15:58.704090 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ab217767-b6c4-4041-a7a6-54d8f8f9c2fd-logs\") pod \"nova-metadata-0\" (UID: \"ab217767-b6c4-4041-a7a6-54d8f8f9c2fd\") " pod="openstack/nova-metadata-0"
Dec 10 13:15:58 crc kubenswrapper[4921]: I1210 13:15:58.704258 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b4f67c57-1639-4769-bdc2-22991d1a145d-logs\") pod \"nova-api-0\" (UID: \"b4f67c57-1639-4769-bdc2-22991d1a145d\") " pod="openstack/nova-api-0"
Dec 10 13:15:58 crc kubenswrapper[4921]: I1210 13:15:58.742697 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Dec 10 13:15:58 crc kubenswrapper[4921]: I1210 13:15:58.749164 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4f67c57-1639-4769-bdc2-22991d1a145d-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"b4f67c57-1639-4769-bdc2-22991d1a145d\") " pod="openstack/nova-api-0"
Dec 10 13:15:58 crc kubenswrapper[4921]: I1210 13:15:58.764896 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cgtxb\" (UniqueName: \"kubernetes.io/projected/ab217767-b6c4-4041-a7a6-54d8f8f9c2fd-kube-api-access-cgtxb\") pod \"nova-metadata-0\" (UID: \"ab217767-b6c4-4041-a7a6-54d8f8f9c2fd\") " pod="openstack/nova-metadata-0"
Dec 10 13:15:58 crc kubenswrapper[4921]: I1210 13:15:58.766408 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-99rpl\" (UniqueName: \"kubernetes.io/projected/b4f67c57-1639-4769-bdc2-22991d1a145d-kube-api-access-99rpl\") pod \"nova-api-0\" (UID: \"b4f67c57-1639-4769-bdc2-22991d1a145d\") " pod="openstack/nova-api-0"
Dec 10 13:15:58 crc kubenswrapper[4921]: I1210 13:15:58.767188 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ab217767-b6c4-4041-a7a6-54d8f8f9c2fd-config-data\") pod \"nova-metadata-0\" (UID: \"ab217767-b6c4-4041-a7a6-54d8f8f9c2fd\") " pod="openstack/nova-metadata-0"
Dec 10 13:15:58 crc kubenswrapper[4921]: I1210 13:15:58.767435 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b4f67c57-1639-4769-bdc2-22991d1a145d-config-data\") pod \"nova-api-0\" (UID: \"b4f67c57-1639-4769-bdc2-22991d1a145d\") " pod="openstack/nova-api-0"
Dec 10 13:15:58 crc kubenswrapper[4921]: I1210 13:15:58.767784 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab217767-b6c4-4041-a7a6-54d8f8f9c2fd-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"ab217767-b6c4-4041-a7a6-54d8f8f9c2fd\") " pod="openstack/nova-metadata-0"
Dec 10 13:15:58 crc kubenswrapper[4921]: I1210 13:15:58.802031 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Dec 10 13:15:58 crc kubenswrapper[4921]: I1210 13:15:58.807870 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/707274db-73e2-467c-9689-3712a3e404b6-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"707274db-73e2-467c-9689-3712a3e404b6\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 10 13:15:58 crc kubenswrapper[4921]: I1210 13:15:58.807936 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/96c81abc-1c0b-47f6-9af8-bbf58960255d-config-data\") pod \"nova-scheduler-0\" (UID: \"96c81abc-1c0b-47f6-9af8-bbf58960255d\") " pod="openstack/nova-scheduler-0"
Dec 10 13:15:58 crc kubenswrapper[4921]: I1210 13:15:58.808010 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b2ll7\" (UniqueName: \"kubernetes.io/projected/707274db-73e2-467c-9689-3712a3e404b6-kube-api-access-b2ll7\") pod \"nova-cell1-novncproxy-0\" (UID: \"707274db-73e2-467c-9689-3712a3e404b6\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 10 13:15:58 crc kubenswrapper[4921]: I1210 13:15:58.808061 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96c81abc-1c0b-47f6-9af8-bbf58960255d-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"96c81abc-1c0b-47f6-9af8-bbf58960255d\") " pod="openstack/nova-scheduler-0"
Dec 10 13:15:58 crc kubenswrapper[4921]: I1210 13:15:58.808133 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/707274db-73e2-467c-9689-3712a3e404b6-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"707274db-73e2-467c-9689-3712a3e404b6\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 10 13:15:58 crc kubenswrapper[4921]: I1210 13:15:58.808172 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cshbk\" (UniqueName: \"kubernetes.io/projected/96c81abc-1c0b-47f6-9af8-bbf58960255d-kube-api-access-cshbk\") pod \"nova-scheduler-0\" (UID: \"96c81abc-1c0b-47f6-9af8-bbf58960255d\") " pod="openstack/nova-scheduler-0"
Dec 10 13:15:58 crc kubenswrapper[4921]: I1210 13:15:58.824835 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Dec 10 13:15:58 crc kubenswrapper[4921]: I1210 13:15:58.828961 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-8b8cf6657-c6mhs"]
Dec 10 13:15:58 crc kubenswrapper[4921]: I1210 13:15:58.830506 4921 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openstack/dnsmasq-dns-8b8cf6657-c6mhs" Dec 10 13:15:58 crc kubenswrapper[4921]: I1210 13:15:58.897447 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8b8cf6657-c6mhs"] Dec 10 13:15:58 crc kubenswrapper[4921]: I1210 13:15:58.911957 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/82c589d7-8e06-40e1-9092-d63e99022767-ovsdbserver-sb\") pod \"dnsmasq-dns-8b8cf6657-c6mhs\" (UID: \"82c589d7-8e06-40e1-9092-d63e99022767\") " pod="openstack/dnsmasq-dns-8b8cf6657-c6mhs" Dec 10 13:15:58 crc kubenswrapper[4921]: I1210 13:15:58.912001 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/707274db-73e2-467c-9689-3712a3e404b6-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"707274db-73e2-467c-9689-3712a3e404b6\") " pod="openstack/nova-cell1-novncproxy-0" Dec 10 13:15:58 crc kubenswrapper[4921]: I1210 13:15:58.912027 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/96c81abc-1c0b-47f6-9af8-bbf58960255d-config-data\") pod \"nova-scheduler-0\" (UID: \"96c81abc-1c0b-47f6-9af8-bbf58960255d\") " pod="openstack/nova-scheduler-0" Dec 10 13:15:58 crc kubenswrapper[4921]: I1210 13:15:58.912073 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b2ll7\" (UniqueName: \"kubernetes.io/projected/707274db-73e2-467c-9689-3712a3e404b6-kube-api-access-b2ll7\") pod \"nova-cell1-novncproxy-0\" (UID: \"707274db-73e2-467c-9689-3712a3e404b6\") " pod="openstack/nova-cell1-novncproxy-0" Dec 10 13:15:58 crc kubenswrapper[4921]: I1210 13:15:58.912108 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96c81abc-1c0b-47f6-9af8-bbf58960255d-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"96c81abc-1c0b-47f6-9af8-bbf58960255d\") " pod="openstack/nova-scheduler-0" Dec 10 13:15:58 crc kubenswrapper[4921]: I1210 13:15:58.912128 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/82c589d7-8e06-40e1-9092-d63e99022767-ovsdbserver-nb\") pod \"dnsmasq-dns-8b8cf6657-c6mhs\" (UID: \"82c589d7-8e06-40e1-9092-d63e99022767\") " pod="openstack/dnsmasq-dns-8b8cf6657-c6mhs" Dec 10 13:15:58 crc kubenswrapper[4921]: I1210 13:15:58.912165 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/707274db-73e2-467c-9689-3712a3e404b6-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"707274db-73e2-467c-9689-3712a3e404b6\") " pod="openstack/nova-cell1-novncproxy-0" Dec 10 13:15:58 crc kubenswrapper[4921]: I1210 13:15:58.912191 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cshbk\" (UniqueName: \"kubernetes.io/projected/96c81abc-1c0b-47f6-9af8-bbf58960255d-kube-api-access-cshbk\") pod \"nova-scheduler-0\" (UID: \"96c81abc-1c0b-47f6-9af8-bbf58960255d\") " pod="openstack/nova-scheduler-0" Dec 10 13:15:58 crc kubenswrapper[4921]: I1210 13:15:58.912227 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: 
\"kubernetes.io/configmap/82c589d7-8e06-40e1-9092-d63e99022767-dns-svc\") pod \"dnsmasq-dns-8b8cf6657-c6mhs\" (UID: \"82c589d7-8e06-40e1-9092-d63e99022767\") " pod="openstack/dnsmasq-dns-8b8cf6657-c6mhs" Dec 10 13:15:58 crc kubenswrapper[4921]: I1210 13:15:58.912243 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5wqjs\" (UniqueName: \"kubernetes.io/projected/82c589d7-8e06-40e1-9092-d63e99022767-kube-api-access-5wqjs\") pod \"dnsmasq-dns-8b8cf6657-c6mhs\" (UID: \"82c589d7-8e06-40e1-9092-d63e99022767\") " pod="openstack/dnsmasq-dns-8b8cf6657-c6mhs" Dec 10 13:15:58 crc kubenswrapper[4921]: I1210 13:15:58.912266 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/82c589d7-8e06-40e1-9092-d63e99022767-config\") pod \"dnsmasq-dns-8b8cf6657-c6mhs\" (UID: \"82c589d7-8e06-40e1-9092-d63e99022767\") " pod="openstack/dnsmasq-dns-8b8cf6657-c6mhs" Dec 10 13:15:58 crc kubenswrapper[4921]: I1210 13:15:58.917608 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/707274db-73e2-467c-9689-3712a3e404b6-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"707274db-73e2-467c-9689-3712a3e404b6\") " pod="openstack/nova-cell1-novncproxy-0" Dec 10 13:15:58 crc kubenswrapper[4921]: I1210 13:15:58.922424 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96c81abc-1c0b-47f6-9af8-bbf58960255d-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"96c81abc-1c0b-47f6-9af8-bbf58960255d\") " pod="openstack/nova-scheduler-0" Dec 10 13:15:58 crc kubenswrapper[4921]: I1210 13:15:58.924884 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/96c81abc-1c0b-47f6-9af8-bbf58960255d-config-data\") pod \"nova-scheduler-0\" (UID: \"96c81abc-1c0b-47f6-9af8-bbf58960255d\") " pod="openstack/nova-scheduler-0" Dec 10 13:15:58 crc kubenswrapper[4921]: I1210 13:15:58.925245 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/707274db-73e2-467c-9689-3712a3e404b6-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"707274db-73e2-467c-9689-3712a3e404b6\") " pod="openstack/nova-cell1-novncproxy-0" Dec 10 13:15:58 crc kubenswrapper[4921]: I1210 13:15:58.936000 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cshbk\" (UniqueName: \"kubernetes.io/projected/96c81abc-1c0b-47f6-9af8-bbf58960255d-kube-api-access-cshbk\") pod \"nova-scheduler-0\" (UID: \"96c81abc-1c0b-47f6-9af8-bbf58960255d\") " pod="openstack/nova-scheduler-0" Dec 10 13:15:58 crc kubenswrapper[4921]: I1210 13:15:58.936556 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b2ll7\" (UniqueName: \"kubernetes.io/projected/707274db-73e2-467c-9689-3712a3e404b6-kube-api-access-b2ll7\") pod \"nova-cell1-novncproxy-0\" (UID: \"707274db-73e2-467c-9689-3712a3e404b6\") " pod="openstack/nova-cell1-novncproxy-0" Dec 10 13:15:59 crc kubenswrapper[4921]: I1210 13:15:59.016139 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/82c589d7-8e06-40e1-9092-d63e99022767-ovsdbserver-nb\") pod \"dnsmasq-dns-8b8cf6657-c6mhs\" (UID: 
\"82c589d7-8e06-40e1-9092-d63e99022767\") " pod="openstack/dnsmasq-dns-8b8cf6657-c6mhs" Dec 10 13:15:59 crc kubenswrapper[4921]: I1210 13:15:59.016234 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/82c589d7-8e06-40e1-9092-d63e99022767-dns-svc\") pod \"dnsmasq-dns-8b8cf6657-c6mhs\" (UID: \"82c589d7-8e06-40e1-9092-d63e99022767\") " pod="openstack/dnsmasq-dns-8b8cf6657-c6mhs" Dec 10 13:15:59 crc kubenswrapper[4921]: I1210 13:15:59.016254 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5wqjs\" (UniqueName: \"kubernetes.io/projected/82c589d7-8e06-40e1-9092-d63e99022767-kube-api-access-5wqjs\") pod \"dnsmasq-dns-8b8cf6657-c6mhs\" (UID: \"82c589d7-8e06-40e1-9092-d63e99022767\") " pod="openstack/dnsmasq-dns-8b8cf6657-c6mhs" Dec 10 13:15:59 crc kubenswrapper[4921]: I1210 13:15:59.016277 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/82c589d7-8e06-40e1-9092-d63e99022767-config\") pod \"dnsmasq-dns-8b8cf6657-c6mhs\" (UID: \"82c589d7-8e06-40e1-9092-d63e99022767\") " pod="openstack/dnsmasq-dns-8b8cf6657-c6mhs" Dec 10 13:15:59 crc kubenswrapper[4921]: I1210 13:15:59.016298 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/82c589d7-8e06-40e1-9092-d63e99022767-ovsdbserver-sb\") pod \"dnsmasq-dns-8b8cf6657-c6mhs\" (UID: \"82c589d7-8e06-40e1-9092-d63e99022767\") " pod="openstack/dnsmasq-dns-8b8cf6657-c6mhs" Dec 10 13:15:59 crc kubenswrapper[4921]: I1210 13:15:59.017150 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/82c589d7-8e06-40e1-9092-d63e99022767-ovsdbserver-sb\") pod \"dnsmasq-dns-8b8cf6657-c6mhs\" (UID: \"82c589d7-8e06-40e1-9092-d63e99022767\") " pod="openstack/dnsmasq-dns-8b8cf6657-c6mhs" Dec 10 13:15:59 crc kubenswrapper[4921]: I1210 13:15:59.017685 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/82c589d7-8e06-40e1-9092-d63e99022767-ovsdbserver-nb\") pod \"dnsmasq-dns-8b8cf6657-c6mhs\" (UID: \"82c589d7-8e06-40e1-9092-d63e99022767\") " pod="openstack/dnsmasq-dns-8b8cf6657-c6mhs" Dec 10 13:15:59 crc kubenswrapper[4921]: I1210 13:15:59.018306 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/82c589d7-8e06-40e1-9092-d63e99022767-dns-svc\") pod \"dnsmasq-dns-8b8cf6657-c6mhs\" (UID: \"82c589d7-8e06-40e1-9092-d63e99022767\") " pod="openstack/dnsmasq-dns-8b8cf6657-c6mhs" Dec 10 13:15:59 crc kubenswrapper[4921]: I1210 13:15:59.018553 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/82c589d7-8e06-40e1-9092-d63e99022767-config\") pod \"dnsmasq-dns-8b8cf6657-c6mhs\" (UID: \"82c589d7-8e06-40e1-9092-d63e99022767\") " pod="openstack/dnsmasq-dns-8b8cf6657-c6mhs" Dec 10 13:15:59 crc kubenswrapper[4921]: I1210 13:15:59.042683 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5wqjs\" (UniqueName: \"kubernetes.io/projected/82c589d7-8e06-40e1-9092-d63e99022767-kube-api-access-5wqjs\") pod \"dnsmasq-dns-8b8cf6657-c6mhs\" (UID: \"82c589d7-8e06-40e1-9092-d63e99022767\") " pod="openstack/dnsmasq-dns-8b8cf6657-c6mhs" Dec 10 13:15:59 crc kubenswrapper[4921]: I1210 
13:15:59.094723 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 10 13:15:59 crc kubenswrapper[4921]: I1210 13:15:59.110808 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 10 13:15:59 crc kubenswrapper[4921]: I1210 13:15:59.172236 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8b8cf6657-c6mhs" Dec 10 13:15:59 crc kubenswrapper[4921]: I1210 13:15:59.290777 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-69vnb"] Dec 10 13:15:59 crc kubenswrapper[4921]: I1210 13:15:59.382883 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-69vnb" event={"ID":"8f5951c8-dfbf-4a11-aeb8-f95531cafe8c","Type":"ContainerStarted","Data":"6b081810060afd190bba8d014048bb431a7825b7a6fd9959e19ce8f89a174c75"} Dec 10 13:15:59 crc kubenswrapper[4921]: I1210 13:15:59.516317 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 10 13:15:59 crc kubenswrapper[4921]: I1210 13:15:59.820560 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 10 13:15:59 crc kubenswrapper[4921]: I1210 13:15:59.871438 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 10 13:15:59 crc kubenswrapper[4921]: I1210 13:15:59.895111 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 10 13:16:00 crc kubenswrapper[4921]: W1210 13:16:00.042648 4921 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod82c589d7_8e06_40e1_9092_d63e99022767.slice/crio-88d7d0c7cb11e8267b54395fcf6f6868b7e54a78cba9b040d61ee5d181a05b1f WatchSource:0}: Error finding container 88d7d0c7cb11e8267b54395fcf6f6868b7e54a78cba9b040d61ee5d181a05b1f: Status 404 returned error can't find the container with id 88d7d0c7cb11e8267b54395fcf6f6868b7e54a78cba9b040d61ee5d181a05b1f Dec 10 13:16:00 crc kubenswrapper[4921]: I1210 13:16:00.046122 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8b8cf6657-c6mhs"] Dec 10 13:16:00 crc kubenswrapper[4921]: I1210 13:16:00.277321 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-svrtf"] Dec 10 13:16:00 crc kubenswrapper[4921]: I1210 13:16:00.279733 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-svrtf" Dec 10 13:16:00 crc kubenswrapper[4921]: I1210 13:16:00.282145 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Dec 10 13:16:00 crc kubenswrapper[4921]: I1210 13:16:00.284438 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts" Dec 10 13:16:00 crc kubenswrapper[4921]: I1210 13:16:00.293743 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-svrtf"] Dec 10 13:16:00 crc kubenswrapper[4921]: I1210 13:16:00.393178 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b4f67c57-1639-4769-bdc2-22991d1a145d","Type":"ContainerStarted","Data":"265bef2c516ad2db79265fd5a1043516c537ccf4d3bf32b4274fb88990fe27f4"} Dec 10 13:16:00 crc kubenswrapper[4921]: I1210 13:16:00.394613 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-69vnb" event={"ID":"8f5951c8-dfbf-4a11-aeb8-f95531cafe8c","Type":"ContainerStarted","Data":"f8d9026a39c7ffd4d66826e57d9fc56bfcd85fc8e815a1ac41ed39bbbac753ac"} Dec 10 13:16:00 crc kubenswrapper[4921]: I1210 13:16:00.398428 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"ab217767-b6c4-4041-a7a6-54d8f8f9c2fd","Type":"ContainerStarted","Data":"3aa40aac0b9e7c1231b794fb7c8542e5661e15348faac8939e470842a2005abc"} Dec 10 13:16:00 crc kubenswrapper[4921]: I1210 13:16:00.400033 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"707274db-73e2-467c-9689-3712a3e404b6","Type":"ContainerStarted","Data":"b6bb9831d8f240e302b40ffedf557256d32d2f33e04fdb095b550a1b2ebd9d2a"} Dec 10 13:16:00 crc kubenswrapper[4921]: I1210 13:16:00.401445 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"96c81abc-1c0b-47f6-9af8-bbf58960255d","Type":"ContainerStarted","Data":"ea859c598fcb013f84a8240036ee8ede6dcb9049a8ab3d1e86044ce767982949"} Dec 10 13:16:00 crc kubenswrapper[4921]: I1210 13:16:00.424532 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-69vnb" podStartSLOduration=2.424508934 podStartE2EDuration="2.424508934s" podCreationTimestamp="2025-12-10 13:15:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 13:16:00.410328964 +0000 UTC m=+1157.626550898" watchObservedRunningTime="2025-12-10 13:16:00.424508934 +0000 UTC m=+1157.640730858" Dec 10 13:16:00 crc kubenswrapper[4921]: I1210 13:16:00.434913 4921 generic.go:334] "Generic (PLEG): container finished" podID="82c589d7-8e06-40e1-9092-d63e99022767" containerID="69ca2c0fa0b7501664deec4c5c0dea103a84d18b98aef08521c8c2c0898fc1b0" exitCode=0 Dec 10 13:16:00 crc kubenswrapper[4921]: I1210 13:16:00.435322 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8b8cf6657-c6mhs" event={"ID":"82c589d7-8e06-40e1-9092-d63e99022767","Type":"ContainerDied","Data":"69ca2c0fa0b7501664deec4c5c0dea103a84d18b98aef08521c8c2c0898fc1b0"} Dec 10 13:16:00 crc kubenswrapper[4921]: I1210 13:16:00.435349 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8b8cf6657-c6mhs" 
event={"ID":"82c589d7-8e06-40e1-9092-d63e99022767","Type":"ContainerStarted","Data":"88d7d0c7cb11e8267b54395fcf6f6868b7e54a78cba9b040d61ee5d181a05b1f"} Dec 10 13:16:00 crc kubenswrapper[4921]: I1210 13:16:00.445490 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/38908ed6-c218-40db-8cec-eed17dbde6e4-scripts\") pod \"nova-cell1-conductor-db-sync-svrtf\" (UID: \"38908ed6-c218-40db-8cec-eed17dbde6e4\") " pod="openstack/nova-cell1-conductor-db-sync-svrtf" Dec 10 13:16:00 crc kubenswrapper[4921]: I1210 13:16:00.445547 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/38908ed6-c218-40db-8cec-eed17dbde6e4-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-svrtf\" (UID: \"38908ed6-c218-40db-8cec-eed17dbde6e4\") " pod="openstack/nova-cell1-conductor-db-sync-svrtf" Dec 10 13:16:00 crc kubenswrapper[4921]: I1210 13:16:00.445595 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/38908ed6-c218-40db-8cec-eed17dbde6e4-config-data\") pod \"nova-cell1-conductor-db-sync-svrtf\" (UID: \"38908ed6-c218-40db-8cec-eed17dbde6e4\") " pod="openstack/nova-cell1-conductor-db-sync-svrtf" Dec 10 13:16:00 crc kubenswrapper[4921]: I1210 13:16:00.445648 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lk9h2\" (UniqueName: \"kubernetes.io/projected/38908ed6-c218-40db-8cec-eed17dbde6e4-kube-api-access-lk9h2\") pod \"nova-cell1-conductor-db-sync-svrtf\" (UID: \"38908ed6-c218-40db-8cec-eed17dbde6e4\") " pod="openstack/nova-cell1-conductor-db-sync-svrtf" Dec 10 13:16:00 crc kubenswrapper[4921]: I1210 13:16:00.547669 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/38908ed6-c218-40db-8cec-eed17dbde6e4-scripts\") pod \"nova-cell1-conductor-db-sync-svrtf\" (UID: \"38908ed6-c218-40db-8cec-eed17dbde6e4\") " pod="openstack/nova-cell1-conductor-db-sync-svrtf" Dec 10 13:16:00 crc kubenswrapper[4921]: I1210 13:16:00.547733 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/38908ed6-c218-40db-8cec-eed17dbde6e4-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-svrtf\" (UID: \"38908ed6-c218-40db-8cec-eed17dbde6e4\") " pod="openstack/nova-cell1-conductor-db-sync-svrtf" Dec 10 13:16:00 crc kubenswrapper[4921]: I1210 13:16:00.547803 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/38908ed6-c218-40db-8cec-eed17dbde6e4-config-data\") pod \"nova-cell1-conductor-db-sync-svrtf\" (UID: \"38908ed6-c218-40db-8cec-eed17dbde6e4\") " pod="openstack/nova-cell1-conductor-db-sync-svrtf" Dec 10 13:16:00 crc kubenswrapper[4921]: I1210 13:16:00.547878 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lk9h2\" (UniqueName: \"kubernetes.io/projected/38908ed6-c218-40db-8cec-eed17dbde6e4-kube-api-access-lk9h2\") pod \"nova-cell1-conductor-db-sync-svrtf\" (UID: \"38908ed6-c218-40db-8cec-eed17dbde6e4\") " pod="openstack/nova-cell1-conductor-db-sync-svrtf" Dec 10 13:16:00 crc kubenswrapper[4921]: I1210 13:16:00.556288 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/38908ed6-c218-40db-8cec-eed17dbde6e4-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-svrtf\" (UID: \"38908ed6-c218-40db-8cec-eed17dbde6e4\") " pod="openstack/nova-cell1-conductor-db-sync-svrtf" Dec 10 13:16:00 crc kubenswrapper[4921]: I1210 13:16:00.556703 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/38908ed6-c218-40db-8cec-eed17dbde6e4-scripts\") pod \"nova-cell1-conductor-db-sync-svrtf\" (UID: \"38908ed6-c218-40db-8cec-eed17dbde6e4\") " pod="openstack/nova-cell1-conductor-db-sync-svrtf" Dec 10 13:16:00 crc kubenswrapper[4921]: I1210 13:16:00.558693 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/38908ed6-c218-40db-8cec-eed17dbde6e4-config-data\") pod \"nova-cell1-conductor-db-sync-svrtf\" (UID: \"38908ed6-c218-40db-8cec-eed17dbde6e4\") " pod="openstack/nova-cell1-conductor-db-sync-svrtf" Dec 10 13:16:00 crc kubenswrapper[4921]: I1210 13:16:00.572291 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lk9h2\" (UniqueName: \"kubernetes.io/projected/38908ed6-c218-40db-8cec-eed17dbde6e4-kube-api-access-lk9h2\") pod \"nova-cell1-conductor-db-sync-svrtf\" (UID: \"38908ed6-c218-40db-8cec-eed17dbde6e4\") " pod="openstack/nova-cell1-conductor-db-sync-svrtf" Dec 10 13:16:00 crc kubenswrapper[4921]: I1210 13:16:00.612330 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-svrtf" Dec 10 13:16:01 crc kubenswrapper[4921]: I1210 13:16:01.126428 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-svrtf"] Dec 10 13:16:01 crc kubenswrapper[4921]: W1210 13:16:01.149503 4921 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod38908ed6_c218_40db_8cec_eed17dbde6e4.slice/crio-db24319c583bb3c13167095918d104b6a28288d7265619866a786b676208a68a WatchSource:0}: Error finding container db24319c583bb3c13167095918d104b6a28288d7265619866a786b676208a68a: Status 404 returned error can't find the container with id db24319c583bb3c13167095918d104b6a28288d7265619866a786b676208a68a Dec 10 13:16:01 crc kubenswrapper[4921]: I1210 13:16:01.461581 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-svrtf" event={"ID":"38908ed6-c218-40db-8cec-eed17dbde6e4","Type":"ContainerStarted","Data":"8add9dd574ba305d2fde1ac418eef025d995828e1602740c6c907468b38b3774"} Dec 10 13:16:01 crc kubenswrapper[4921]: I1210 13:16:01.461974 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-svrtf" event={"ID":"38908ed6-c218-40db-8cec-eed17dbde6e4","Type":"ContainerStarted","Data":"db24319c583bb3c13167095918d104b6a28288d7265619866a786b676208a68a"} Dec 10 13:16:01 crc kubenswrapper[4921]: I1210 13:16:01.469595 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8b8cf6657-c6mhs" event={"ID":"82c589d7-8e06-40e1-9092-d63e99022767","Type":"ContainerStarted","Data":"7eb6cb8c5f2dd427ce401ff1158cce882c28c222099eea04b96cc1e6ed03b9bb"} Dec 10 13:16:01 crc kubenswrapper[4921]: I1210 13:16:01.469760 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-8b8cf6657-c6mhs" Dec 10 13:16:01 crc kubenswrapper[4921]: I1210 13:16:01.509843 4921 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-svrtf" podStartSLOduration=1.509829212 podStartE2EDuration="1.509829212s" podCreationTimestamp="2025-12-10 13:16:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 13:16:01.484972265 +0000 UTC m=+1158.701194189" watchObservedRunningTime="2025-12-10 13:16:01.509829212 +0000 UTC m=+1158.726051136" Dec 10 13:16:01 crc kubenswrapper[4921]: I1210 13:16:01.512515 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-8b8cf6657-c6mhs" podStartSLOduration=3.512503754 podStartE2EDuration="3.512503754s" podCreationTimestamp="2025-12-10 13:15:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 13:16:01.508429334 +0000 UTC m=+1158.724651278" watchObservedRunningTime="2025-12-10 13:16:01.512503754 +0000 UTC m=+1158.728725678" Dec 10 13:16:02 crc kubenswrapper[4921]: I1210 13:16:02.357772 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 10 13:16:02 crc kubenswrapper[4921]: I1210 13:16:02.377734 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 10 13:16:04 crc kubenswrapper[4921]: I1210 13:16:04.502038 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b4f67c57-1639-4769-bdc2-22991d1a145d","Type":"ContainerStarted","Data":"eff45db28c9a739b654706c16f8b89dbb53a841dd084499d9e0ad849a5dbe970"} Dec 10 13:16:04 crc kubenswrapper[4921]: I1210 13:16:04.504211 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"ab217767-b6c4-4041-a7a6-54d8f8f9c2fd","Type":"ContainerStarted","Data":"a0aec24cbfa3846997b776b2502ac9dab0ee447efe1f87c20a4aafe8ea72aacf"} Dec 10 13:16:04 crc kubenswrapper[4921]: I1210 13:16:04.505595 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"707274db-73e2-467c-9689-3712a3e404b6","Type":"ContainerStarted","Data":"cdcb6c57506241626d8ec7b787d8455562f2c7c307507138de9d1aae584dd461"} Dec 10 13:16:04 crc kubenswrapper[4921]: I1210 13:16:04.505750 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="707274db-73e2-467c-9689-3712a3e404b6" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://cdcb6c57506241626d8ec7b787d8455562f2c7c307507138de9d1aae584dd461" gracePeriod=30 Dec 10 13:16:04 crc kubenswrapper[4921]: I1210 13:16:04.511458 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"96c81abc-1c0b-47f6-9af8-bbf58960255d","Type":"ContainerStarted","Data":"6d949f23252cd8ad1af98dcb6da2062ea02826c5c85748c031f9a5db8331772e"} Dec 10 13:16:04 crc kubenswrapper[4921]: I1210 13:16:04.536907 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.356177291 podStartE2EDuration="6.53688441s" podCreationTimestamp="2025-12-10 13:15:58 +0000 UTC" firstStartedPulling="2025-12-10 13:15:59.832101106 +0000 UTC m=+1157.048323030" lastFinishedPulling="2025-12-10 13:16:04.012808225 +0000 UTC m=+1161.229030149" observedRunningTime="2025-12-10 13:16:04.518541807 +0000 UTC m=+1161.734763731" watchObservedRunningTime="2025-12-10 
13:16:04.53688441 +0000 UTC m=+1161.753106334" Dec 10 13:16:04 crc kubenswrapper[4921]: I1210 13:16:04.547451 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.381620433 podStartE2EDuration="6.547429792s" podCreationTimestamp="2025-12-10 13:15:58 +0000 UTC" firstStartedPulling="2025-12-10 13:15:59.844836838 +0000 UTC m=+1157.061058762" lastFinishedPulling="2025-12-10 13:16:04.010646197 +0000 UTC m=+1161.226868121" observedRunningTime="2025-12-10 13:16:04.54101474 +0000 UTC m=+1161.757236674" watchObservedRunningTime="2025-12-10 13:16:04.547429792 +0000 UTC m=+1161.763651726" Dec 10 13:16:05 crc kubenswrapper[4921]: I1210 13:16:05.525588 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b4f67c57-1639-4769-bdc2-22991d1a145d","Type":"ContainerStarted","Data":"ef139bf18fe8ab29a003648fe312252d8ba39c9a126e96b6c1e10dc58e881f2b"} Dec 10 13:16:05 crc kubenswrapper[4921]: I1210 13:16:05.531819 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="ab217767-b6c4-4041-a7a6-54d8f8f9c2fd" containerName="nova-metadata-log" containerID="cri-o://a0aec24cbfa3846997b776b2502ac9dab0ee447efe1f87c20a4aafe8ea72aacf" gracePeriod=30 Dec 10 13:16:05 crc kubenswrapper[4921]: I1210 13:16:05.532086 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"ab217767-b6c4-4041-a7a6-54d8f8f9c2fd","Type":"ContainerStarted","Data":"d653f4813328d4c180a144ad81d271cec327ddf40554cb5e951f93af39e4123f"} Dec 10 13:16:05 crc kubenswrapper[4921]: I1210 13:16:05.532152 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="ab217767-b6c4-4041-a7a6-54d8f8f9c2fd" containerName="nova-metadata-metadata" containerID="cri-o://d653f4813328d4c180a144ad81d271cec327ddf40554cb5e951f93af39e4123f" gracePeriod=30 Dec 10 13:16:05 crc kubenswrapper[4921]: I1210 13:16:05.551183 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=3.347192817 podStartE2EDuration="7.55116455s" podCreationTimestamp="2025-12-10 13:15:58 +0000 UTC" firstStartedPulling="2025-12-10 13:15:59.814845763 +0000 UTC m=+1157.031067687" lastFinishedPulling="2025-12-10 13:16:04.018817496 +0000 UTC m=+1161.235039420" observedRunningTime="2025-12-10 13:16:05.54184527 +0000 UTC m=+1162.758067214" watchObservedRunningTime="2025-12-10 13:16:05.55116455 +0000 UTC m=+1162.767386494" Dec 10 13:16:05 crc kubenswrapper[4921]: I1210 13:16:05.570553 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=3.112181819 podStartE2EDuration="7.57053524s" podCreationTimestamp="2025-12-10 13:15:58 +0000 UTC" firstStartedPulling="2025-12-10 13:15:59.558927054 +0000 UTC m=+1156.775148978" lastFinishedPulling="2025-12-10 13:16:04.017280475 +0000 UTC m=+1161.233502399" observedRunningTime="2025-12-10 13:16:05.563131671 +0000 UTC m=+1162.779353595" watchObservedRunningTime="2025-12-10 13:16:05.57053524 +0000 UTC m=+1162.786757164" Dec 10 13:16:06 crc kubenswrapper[4921]: I1210 13:16:06.144906 4921 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 10 13:16:06 crc kubenswrapper[4921]: I1210 13:16:06.256007 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cgtxb\" (UniqueName: \"kubernetes.io/projected/ab217767-b6c4-4041-a7a6-54d8f8f9c2fd-kube-api-access-cgtxb\") pod \"ab217767-b6c4-4041-a7a6-54d8f8f9c2fd\" (UID: \"ab217767-b6c4-4041-a7a6-54d8f8f9c2fd\") " Dec 10 13:16:06 crc kubenswrapper[4921]: I1210 13:16:06.256195 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ab217767-b6c4-4041-a7a6-54d8f8f9c2fd-logs\") pod \"ab217767-b6c4-4041-a7a6-54d8f8f9c2fd\" (UID: \"ab217767-b6c4-4041-a7a6-54d8f8f9c2fd\") " Dec 10 13:16:06 crc kubenswrapper[4921]: I1210 13:16:06.256240 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ab217767-b6c4-4041-a7a6-54d8f8f9c2fd-config-data\") pod \"ab217767-b6c4-4041-a7a6-54d8f8f9c2fd\" (UID: \"ab217767-b6c4-4041-a7a6-54d8f8f9c2fd\") " Dec 10 13:16:06 crc kubenswrapper[4921]: I1210 13:16:06.256260 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab217767-b6c4-4041-a7a6-54d8f8f9c2fd-combined-ca-bundle\") pod \"ab217767-b6c4-4041-a7a6-54d8f8f9c2fd\" (UID: \"ab217767-b6c4-4041-a7a6-54d8f8f9c2fd\") " Dec 10 13:16:06 crc kubenswrapper[4921]: I1210 13:16:06.258781 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ab217767-b6c4-4041-a7a6-54d8f8f9c2fd-logs" (OuterVolumeSpecName: "logs") pod "ab217767-b6c4-4041-a7a6-54d8f8f9c2fd" (UID: "ab217767-b6c4-4041-a7a6-54d8f8f9c2fd"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 13:16:06 crc kubenswrapper[4921]: I1210 13:16:06.263099 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ab217767-b6c4-4041-a7a6-54d8f8f9c2fd-kube-api-access-cgtxb" (OuterVolumeSpecName: "kube-api-access-cgtxb") pod "ab217767-b6c4-4041-a7a6-54d8f8f9c2fd" (UID: "ab217767-b6c4-4041-a7a6-54d8f8f9c2fd"). InnerVolumeSpecName "kube-api-access-cgtxb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 13:16:06 crc kubenswrapper[4921]: I1210 13:16:06.284277 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ab217767-b6c4-4041-a7a6-54d8f8f9c2fd-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ab217767-b6c4-4041-a7a6-54d8f8f9c2fd" (UID: "ab217767-b6c4-4041-a7a6-54d8f8f9c2fd"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:16:06 crc kubenswrapper[4921]: I1210 13:16:06.301666 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ab217767-b6c4-4041-a7a6-54d8f8f9c2fd-config-data" (OuterVolumeSpecName: "config-data") pod "ab217767-b6c4-4041-a7a6-54d8f8f9c2fd" (UID: "ab217767-b6c4-4041-a7a6-54d8f8f9c2fd"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:16:06 crc kubenswrapper[4921]: I1210 13:16:06.358283 4921 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ab217767-b6c4-4041-a7a6-54d8f8f9c2fd-config-data\") on node \"crc\" DevicePath \"\"" Dec 10 13:16:06 crc kubenswrapper[4921]: I1210 13:16:06.358317 4921 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab217767-b6c4-4041-a7a6-54d8f8f9c2fd-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 13:16:06 crc kubenswrapper[4921]: I1210 13:16:06.358331 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cgtxb\" (UniqueName: \"kubernetes.io/projected/ab217767-b6c4-4041-a7a6-54d8f8f9c2fd-kube-api-access-cgtxb\") on node \"crc\" DevicePath \"\"" Dec 10 13:16:06 crc kubenswrapper[4921]: I1210 13:16:06.358344 4921 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ab217767-b6c4-4041-a7a6-54d8f8f9c2fd-logs\") on node \"crc\" DevicePath \"\"" Dec 10 13:16:06 crc kubenswrapper[4921]: I1210 13:16:06.541421 4921 generic.go:334] "Generic (PLEG): container finished" podID="ab217767-b6c4-4041-a7a6-54d8f8f9c2fd" containerID="d653f4813328d4c180a144ad81d271cec327ddf40554cb5e951f93af39e4123f" exitCode=0 Dec 10 13:16:06 crc kubenswrapper[4921]: I1210 13:16:06.541460 4921 generic.go:334] "Generic (PLEG): container finished" podID="ab217767-b6c4-4041-a7a6-54d8f8f9c2fd" containerID="a0aec24cbfa3846997b776b2502ac9dab0ee447efe1f87c20a4aafe8ea72aacf" exitCode=143 Dec 10 13:16:06 crc kubenswrapper[4921]: I1210 13:16:06.541461 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"ab217767-b6c4-4041-a7a6-54d8f8f9c2fd","Type":"ContainerDied","Data":"d653f4813328d4c180a144ad81d271cec327ddf40554cb5e951f93af39e4123f"} Dec 10 13:16:06 crc kubenswrapper[4921]: I1210 13:16:06.541502 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"ab217767-b6c4-4041-a7a6-54d8f8f9c2fd","Type":"ContainerDied","Data":"a0aec24cbfa3846997b776b2502ac9dab0ee447efe1f87c20a4aafe8ea72aacf"} Dec 10 13:16:06 crc kubenswrapper[4921]: I1210 13:16:06.541514 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"ab217767-b6c4-4041-a7a6-54d8f8f9c2fd","Type":"ContainerDied","Data":"3aa40aac0b9e7c1231b794fb7c8542e5661e15348faac8939e470842a2005abc"} Dec 10 13:16:06 crc kubenswrapper[4921]: I1210 13:16:06.541513 4921 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 10 13:16:06 crc kubenswrapper[4921]: I1210 13:16:06.541528 4921 scope.go:117] "RemoveContainer" containerID="d653f4813328d4c180a144ad81d271cec327ddf40554cb5e951f93af39e4123f" Dec 10 13:16:06 crc kubenswrapper[4921]: I1210 13:16:06.566857 4921 scope.go:117] "RemoveContainer" containerID="a0aec24cbfa3846997b776b2502ac9dab0ee447efe1f87c20a4aafe8ea72aacf" Dec 10 13:16:06 crc kubenswrapper[4921]: I1210 13:16:06.577196 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 10 13:16:06 crc kubenswrapper[4921]: I1210 13:16:06.592867 4921 scope.go:117] "RemoveContainer" containerID="d653f4813328d4c180a144ad81d271cec327ddf40554cb5e951f93af39e4123f" Dec 10 13:16:06 crc kubenswrapper[4921]: E1210 13:16:06.593414 4921 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d653f4813328d4c180a144ad81d271cec327ddf40554cb5e951f93af39e4123f\": container with ID starting with d653f4813328d4c180a144ad81d271cec327ddf40554cb5e951f93af39e4123f not found: ID does not exist" containerID="d653f4813328d4c180a144ad81d271cec327ddf40554cb5e951f93af39e4123f" Dec 10 13:16:06 crc kubenswrapper[4921]: I1210 13:16:06.593446 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d653f4813328d4c180a144ad81d271cec327ddf40554cb5e951f93af39e4123f"} err="failed to get container status \"d653f4813328d4c180a144ad81d271cec327ddf40554cb5e951f93af39e4123f\": rpc error: code = NotFound desc = could not find container \"d653f4813328d4c180a144ad81d271cec327ddf40554cb5e951f93af39e4123f\": container with ID starting with d653f4813328d4c180a144ad81d271cec327ddf40554cb5e951f93af39e4123f not found: ID does not exist" Dec 10 13:16:06 crc kubenswrapper[4921]: I1210 13:16:06.593473 4921 scope.go:117] "RemoveContainer" containerID="a0aec24cbfa3846997b776b2502ac9dab0ee447efe1f87c20a4aafe8ea72aacf" Dec 10 13:16:06 crc kubenswrapper[4921]: E1210 13:16:06.593866 4921 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a0aec24cbfa3846997b776b2502ac9dab0ee447efe1f87c20a4aafe8ea72aacf\": container with ID starting with a0aec24cbfa3846997b776b2502ac9dab0ee447efe1f87c20a4aafe8ea72aacf not found: ID does not exist" containerID="a0aec24cbfa3846997b776b2502ac9dab0ee447efe1f87c20a4aafe8ea72aacf" Dec 10 13:16:06 crc kubenswrapper[4921]: I1210 13:16:06.593925 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a0aec24cbfa3846997b776b2502ac9dab0ee447efe1f87c20a4aafe8ea72aacf"} err="failed to get container status \"a0aec24cbfa3846997b776b2502ac9dab0ee447efe1f87c20a4aafe8ea72aacf\": rpc error: code = NotFound desc = could not find container \"a0aec24cbfa3846997b776b2502ac9dab0ee447efe1f87c20a4aafe8ea72aacf\": container with ID starting with a0aec24cbfa3846997b776b2502ac9dab0ee447efe1f87c20a4aafe8ea72aacf not found: ID does not exist" Dec 10 13:16:06 crc kubenswrapper[4921]: I1210 13:16:06.593964 4921 scope.go:117] "RemoveContainer" containerID="d653f4813328d4c180a144ad81d271cec327ddf40554cb5e951f93af39e4123f" Dec 10 13:16:06 crc kubenswrapper[4921]: I1210 13:16:06.594441 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d653f4813328d4c180a144ad81d271cec327ddf40554cb5e951f93af39e4123f"} err="failed to get container status 
\"d653f4813328d4c180a144ad81d271cec327ddf40554cb5e951f93af39e4123f\": rpc error: code = NotFound desc = could not find container \"d653f4813328d4c180a144ad81d271cec327ddf40554cb5e951f93af39e4123f\": container with ID starting with d653f4813328d4c180a144ad81d271cec327ddf40554cb5e951f93af39e4123f not found: ID does not exist" Dec 10 13:16:06 crc kubenswrapper[4921]: I1210 13:16:06.594463 4921 scope.go:117] "RemoveContainer" containerID="a0aec24cbfa3846997b776b2502ac9dab0ee447efe1f87c20a4aafe8ea72aacf" Dec 10 13:16:06 crc kubenswrapper[4921]: I1210 13:16:06.594721 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a0aec24cbfa3846997b776b2502ac9dab0ee447efe1f87c20a4aafe8ea72aacf"} err="failed to get container status \"a0aec24cbfa3846997b776b2502ac9dab0ee447efe1f87c20a4aafe8ea72aacf\": rpc error: code = NotFound desc = could not find container \"a0aec24cbfa3846997b776b2502ac9dab0ee447efe1f87c20a4aafe8ea72aacf\": container with ID starting with a0aec24cbfa3846997b776b2502ac9dab0ee447efe1f87c20a4aafe8ea72aacf not found: ID does not exist" Dec 10 13:16:06 crc kubenswrapper[4921]: I1210 13:16:06.597146 4921 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 10 13:16:06 crc kubenswrapper[4921]: I1210 13:16:06.610505 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 10 13:16:06 crc kubenswrapper[4921]: E1210 13:16:06.610947 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab217767-b6c4-4041-a7a6-54d8f8f9c2fd" containerName="nova-metadata-metadata" Dec 10 13:16:06 crc kubenswrapper[4921]: I1210 13:16:06.610964 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab217767-b6c4-4041-a7a6-54d8f8f9c2fd" containerName="nova-metadata-metadata" Dec 10 13:16:06 crc kubenswrapper[4921]: E1210 13:16:06.610981 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab217767-b6c4-4041-a7a6-54d8f8f9c2fd" containerName="nova-metadata-log" Dec 10 13:16:06 crc kubenswrapper[4921]: I1210 13:16:06.610987 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab217767-b6c4-4041-a7a6-54d8f8f9c2fd" containerName="nova-metadata-log" Dec 10 13:16:06 crc kubenswrapper[4921]: I1210 13:16:06.611159 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="ab217767-b6c4-4041-a7a6-54d8f8f9c2fd" containerName="nova-metadata-metadata" Dec 10 13:16:06 crc kubenswrapper[4921]: I1210 13:16:06.611186 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="ab217767-b6c4-4041-a7a6-54d8f8f9c2fd" containerName="nova-metadata-log" Dec 10 13:16:06 crc kubenswrapper[4921]: I1210 13:16:06.612151 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 10 13:16:06 crc kubenswrapper[4921]: I1210 13:16:06.615201 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Dec 10 13:16:06 crc kubenswrapper[4921]: I1210 13:16:06.615375 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 10 13:16:06 crc kubenswrapper[4921]: I1210 13:16:06.643380 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 10 13:16:06 crc kubenswrapper[4921]: I1210 13:16:06.663722 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/54108cf1-2eb4-49ee-ad9e-1dd94cbd717f-logs\") pod \"nova-metadata-0\" (UID: \"54108cf1-2eb4-49ee-ad9e-1dd94cbd717f\") " pod="openstack/nova-metadata-0" Dec 10 13:16:06 crc kubenswrapper[4921]: I1210 13:16:06.663821 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/54108cf1-2eb4-49ee-ad9e-1dd94cbd717f-config-data\") pod \"nova-metadata-0\" (UID: \"54108cf1-2eb4-49ee-ad9e-1dd94cbd717f\") " pod="openstack/nova-metadata-0" Dec 10 13:16:06 crc kubenswrapper[4921]: I1210 13:16:06.663842 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-442cc\" (UniqueName: \"kubernetes.io/projected/54108cf1-2eb4-49ee-ad9e-1dd94cbd717f-kube-api-access-442cc\") pod \"nova-metadata-0\" (UID: \"54108cf1-2eb4-49ee-ad9e-1dd94cbd717f\") " pod="openstack/nova-metadata-0" Dec 10 13:16:06 crc kubenswrapper[4921]: I1210 13:16:06.663857 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54108cf1-2eb4-49ee-ad9e-1dd94cbd717f-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"54108cf1-2eb4-49ee-ad9e-1dd94cbd717f\") " pod="openstack/nova-metadata-0" Dec 10 13:16:06 crc kubenswrapper[4921]: I1210 13:16:06.663940 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/54108cf1-2eb4-49ee-ad9e-1dd94cbd717f-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"54108cf1-2eb4-49ee-ad9e-1dd94cbd717f\") " pod="openstack/nova-metadata-0" Dec 10 13:16:06 crc kubenswrapper[4921]: I1210 13:16:06.765932 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/54108cf1-2eb4-49ee-ad9e-1dd94cbd717f-logs\") pod \"nova-metadata-0\" (UID: \"54108cf1-2eb4-49ee-ad9e-1dd94cbd717f\") " pod="openstack/nova-metadata-0" Dec 10 13:16:06 crc kubenswrapper[4921]: I1210 13:16:06.766057 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/54108cf1-2eb4-49ee-ad9e-1dd94cbd717f-config-data\") pod \"nova-metadata-0\" (UID: \"54108cf1-2eb4-49ee-ad9e-1dd94cbd717f\") " pod="openstack/nova-metadata-0" Dec 10 13:16:06 crc kubenswrapper[4921]: I1210 13:16:06.766099 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-442cc\" (UniqueName: \"kubernetes.io/projected/54108cf1-2eb4-49ee-ad9e-1dd94cbd717f-kube-api-access-442cc\") pod \"nova-metadata-0\" (UID: \"54108cf1-2eb4-49ee-ad9e-1dd94cbd717f\") " pod="openstack/nova-metadata-0" Dec 10 
13:16:06 crc kubenswrapper[4921]: I1210 13:16:06.766126 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54108cf1-2eb4-49ee-ad9e-1dd94cbd717f-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"54108cf1-2eb4-49ee-ad9e-1dd94cbd717f\") " pod="openstack/nova-metadata-0" Dec 10 13:16:06 crc kubenswrapper[4921]: I1210 13:16:06.766221 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/54108cf1-2eb4-49ee-ad9e-1dd94cbd717f-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"54108cf1-2eb4-49ee-ad9e-1dd94cbd717f\") " pod="openstack/nova-metadata-0" Dec 10 13:16:06 crc kubenswrapper[4921]: I1210 13:16:06.767707 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/54108cf1-2eb4-49ee-ad9e-1dd94cbd717f-logs\") pod \"nova-metadata-0\" (UID: \"54108cf1-2eb4-49ee-ad9e-1dd94cbd717f\") " pod="openstack/nova-metadata-0" Dec 10 13:16:06 crc kubenswrapper[4921]: I1210 13:16:06.773018 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/54108cf1-2eb4-49ee-ad9e-1dd94cbd717f-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"54108cf1-2eb4-49ee-ad9e-1dd94cbd717f\") " pod="openstack/nova-metadata-0" Dec 10 13:16:06 crc kubenswrapper[4921]: I1210 13:16:06.783058 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/54108cf1-2eb4-49ee-ad9e-1dd94cbd717f-config-data\") pod \"nova-metadata-0\" (UID: \"54108cf1-2eb4-49ee-ad9e-1dd94cbd717f\") " pod="openstack/nova-metadata-0" Dec 10 13:16:06 crc kubenswrapper[4921]: I1210 13:16:06.787778 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-442cc\" (UniqueName: \"kubernetes.io/projected/54108cf1-2eb4-49ee-ad9e-1dd94cbd717f-kube-api-access-442cc\") pod \"nova-metadata-0\" (UID: \"54108cf1-2eb4-49ee-ad9e-1dd94cbd717f\") " pod="openstack/nova-metadata-0" Dec 10 13:16:06 crc kubenswrapper[4921]: I1210 13:16:06.787785 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54108cf1-2eb4-49ee-ad9e-1dd94cbd717f-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"54108cf1-2eb4-49ee-ad9e-1dd94cbd717f\") " pod="openstack/nova-metadata-0" Dec 10 13:16:06 crc kubenswrapper[4921]: I1210 13:16:06.929230 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 10 13:16:07 crc kubenswrapper[4921]: I1210 13:16:07.204355 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ab217767-b6c4-4041-a7a6-54d8f8f9c2fd" path="/var/lib/kubelet/pods/ab217767-b6c4-4041-a7a6-54d8f8f9c2fd/volumes" Dec 10 13:16:07 crc kubenswrapper[4921]: I1210 13:16:07.421846 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 10 13:16:07 crc kubenswrapper[4921]: I1210 13:16:07.553777 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"54108cf1-2eb4-49ee-ad9e-1dd94cbd717f","Type":"ContainerStarted","Data":"f4595ebc7a07649f700358890c8dabcbd8299e46f22ad14b5f7ae61844450ec8"} Dec 10 13:16:08 crc kubenswrapper[4921]: I1210 13:16:08.565239 4921 generic.go:334] "Generic (PLEG): container finished" podID="8f5951c8-dfbf-4a11-aeb8-f95531cafe8c" containerID="f8d9026a39c7ffd4d66826e57d9fc56bfcd85fc8e815a1ac41ed39bbbac753ac" exitCode=0 Dec 10 13:16:08 crc kubenswrapper[4921]: I1210 13:16:08.565294 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-69vnb" event={"ID":"8f5951c8-dfbf-4a11-aeb8-f95531cafe8c","Type":"ContainerDied","Data":"f8d9026a39c7ffd4d66826e57d9fc56bfcd85fc8e815a1ac41ed39bbbac753ac"} Dec 10 13:16:08 crc kubenswrapper[4921]: I1210 13:16:08.568846 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"54108cf1-2eb4-49ee-ad9e-1dd94cbd717f","Type":"ContainerStarted","Data":"3d7d99b1d78346ddd9aa7ea8b177930ba43387fc361a94125708544a4978373f"} Dec 10 13:16:08 crc kubenswrapper[4921]: I1210 13:16:08.568907 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"54108cf1-2eb4-49ee-ad9e-1dd94cbd717f","Type":"ContainerStarted","Data":"2a4e85970c8699ceacce5ec857ea1af6797fcfd683a8ea240fe5a1476b974b87"} Dec 10 13:16:08 crc kubenswrapper[4921]: I1210 13:16:08.605038 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.605015287 podStartE2EDuration="2.605015287s" podCreationTimestamp="2025-12-10 13:16:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 13:16:08.596297413 +0000 UTC m=+1165.812519357" watchObservedRunningTime="2025-12-10 13:16:08.605015287 +0000 UTC m=+1165.821237211" Dec 10 13:16:08 crc kubenswrapper[4921]: I1210 13:16:08.825424 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 10 13:16:08 crc kubenswrapper[4921]: I1210 13:16:08.825490 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 10 13:16:09 crc kubenswrapper[4921]: I1210 13:16:09.095434 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 10 13:16:09 crc kubenswrapper[4921]: I1210 13:16:09.095848 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Dec 10 13:16:09 crc kubenswrapper[4921]: I1210 13:16:09.111782 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Dec 10 13:16:09 crc kubenswrapper[4921]: I1210 13:16:09.128205 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Dec 10 13:16:09 crc kubenswrapper[4921]: I1210 
13:16:09.174860 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-8b8cf6657-c6mhs" Dec 10 13:16:09 crc kubenswrapper[4921]: I1210 13:16:09.246969 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-58db5546cc-xxl5s"] Dec 10 13:16:09 crc kubenswrapper[4921]: I1210 13:16:09.247224 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-58db5546cc-xxl5s" podUID="e5bcffc0-2552-4a2d-8fd0-ccfe997bf989" containerName="dnsmasq-dns" containerID="cri-o://e91d133c156ed0040ceda5bf5182460f29395de7ba613bc03c0b9cd9cc31a238" gracePeriod=10 Dec 10 13:16:09 crc kubenswrapper[4921]: I1210 13:16:09.618510 4921 generic.go:334] "Generic (PLEG): container finished" podID="e5bcffc0-2552-4a2d-8fd0-ccfe997bf989" containerID="e91d133c156ed0040ceda5bf5182460f29395de7ba613bc03c0b9cd9cc31a238" exitCode=0 Dec 10 13:16:09 crc kubenswrapper[4921]: I1210 13:16:09.618858 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-58db5546cc-xxl5s" event={"ID":"e5bcffc0-2552-4a2d-8fd0-ccfe997bf989","Type":"ContainerDied","Data":"e91d133c156ed0040ceda5bf5182460f29395de7ba613bc03c0b9cd9cc31a238"} Dec 10 13:16:09 crc kubenswrapper[4921]: I1210 13:16:09.636654 4921 generic.go:334] "Generic (PLEG): container finished" podID="38908ed6-c218-40db-8cec-eed17dbde6e4" containerID="8add9dd574ba305d2fde1ac418eef025d995828e1602740c6c907468b38b3774" exitCode=0 Dec 10 13:16:09 crc kubenswrapper[4921]: I1210 13:16:09.636890 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-svrtf" event={"ID":"38908ed6-c218-40db-8cec-eed17dbde6e4","Type":"ContainerDied","Data":"8add9dd574ba305d2fde1ac418eef025d995828e1602740c6c907468b38b3774"} Dec 10 13:16:09 crc kubenswrapper[4921]: I1210 13:16:09.690631 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Dec 10 13:16:09 crc kubenswrapper[4921]: I1210 13:16:09.828421 4921 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-58db5546cc-xxl5s" Dec 10 13:16:09 crc kubenswrapper[4921]: I1210 13:16:09.924310 4921 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="b4f67c57-1639-4769-bdc2-22991d1a145d" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.169:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 10 13:16:09 crc kubenswrapper[4921]: I1210 13:16:09.924625 4921 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="b4f67c57-1639-4769-bdc2-22991d1a145d" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.169:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 10 13:16:09 crc kubenswrapper[4921]: I1210 13:16:09.927823 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e5bcffc0-2552-4a2d-8fd0-ccfe997bf989-ovsdbserver-sb\") pod \"e5bcffc0-2552-4a2d-8fd0-ccfe997bf989\" (UID: \"e5bcffc0-2552-4a2d-8fd0-ccfe997bf989\") " Dec 10 13:16:09 crc kubenswrapper[4921]: I1210 13:16:09.927872 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w59r5\" (UniqueName: \"kubernetes.io/projected/e5bcffc0-2552-4a2d-8fd0-ccfe997bf989-kube-api-access-w59r5\") pod \"e5bcffc0-2552-4a2d-8fd0-ccfe997bf989\" (UID: \"e5bcffc0-2552-4a2d-8fd0-ccfe997bf989\") " Dec 10 13:16:09 crc kubenswrapper[4921]: I1210 13:16:09.928019 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e5bcffc0-2552-4a2d-8fd0-ccfe997bf989-config\") pod \"e5bcffc0-2552-4a2d-8fd0-ccfe997bf989\" (UID: \"e5bcffc0-2552-4a2d-8fd0-ccfe997bf989\") " Dec 10 13:16:09 crc kubenswrapper[4921]: I1210 13:16:09.928048 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e5bcffc0-2552-4a2d-8fd0-ccfe997bf989-dns-svc\") pod \"e5bcffc0-2552-4a2d-8fd0-ccfe997bf989\" (UID: \"e5bcffc0-2552-4a2d-8fd0-ccfe997bf989\") " Dec 10 13:16:09 crc kubenswrapper[4921]: I1210 13:16:09.928073 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e5bcffc0-2552-4a2d-8fd0-ccfe997bf989-ovsdbserver-nb\") pod \"e5bcffc0-2552-4a2d-8fd0-ccfe997bf989\" (UID: \"e5bcffc0-2552-4a2d-8fd0-ccfe997bf989\") " Dec 10 13:16:09 crc kubenswrapper[4921]: I1210 13:16:09.954111 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e5bcffc0-2552-4a2d-8fd0-ccfe997bf989-kube-api-access-w59r5" (OuterVolumeSpecName: "kube-api-access-w59r5") pod "e5bcffc0-2552-4a2d-8fd0-ccfe997bf989" (UID: "e5bcffc0-2552-4a2d-8fd0-ccfe997bf989"). InnerVolumeSpecName "kube-api-access-w59r5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 13:16:10 crc kubenswrapper[4921]: I1210 13:16:10.013840 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e5bcffc0-2552-4a2d-8fd0-ccfe997bf989-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "e5bcffc0-2552-4a2d-8fd0-ccfe997bf989" (UID: "e5bcffc0-2552-4a2d-8fd0-ccfe997bf989"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 13:16:10 crc kubenswrapper[4921]: I1210 13:16:10.029907 4921 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e5bcffc0-2552-4a2d-8fd0-ccfe997bf989-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 10 13:16:10 crc kubenswrapper[4921]: I1210 13:16:10.029942 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w59r5\" (UniqueName: \"kubernetes.io/projected/e5bcffc0-2552-4a2d-8fd0-ccfe997bf989-kube-api-access-w59r5\") on node \"crc\" DevicePath \"\"" Dec 10 13:16:10 crc kubenswrapper[4921]: I1210 13:16:10.048777 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-69vnb" Dec 10 13:16:10 crc kubenswrapper[4921]: I1210 13:16:10.048818 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e5bcffc0-2552-4a2d-8fd0-ccfe997bf989-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "e5bcffc0-2552-4a2d-8fd0-ccfe997bf989" (UID: "e5bcffc0-2552-4a2d-8fd0-ccfe997bf989"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 13:16:10 crc kubenswrapper[4921]: I1210 13:16:10.060859 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e5bcffc0-2552-4a2d-8fd0-ccfe997bf989-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "e5bcffc0-2552-4a2d-8fd0-ccfe997bf989" (UID: "e5bcffc0-2552-4a2d-8fd0-ccfe997bf989"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 13:16:10 crc kubenswrapper[4921]: I1210 13:16:10.066099 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e5bcffc0-2552-4a2d-8fd0-ccfe997bf989-config" (OuterVolumeSpecName: "config") pod "e5bcffc0-2552-4a2d-8fd0-ccfe997bf989" (UID: "e5bcffc0-2552-4a2d-8fd0-ccfe997bf989"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 13:16:10 crc kubenswrapper[4921]: I1210 13:16:10.131471 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f5951c8-dfbf-4a11-aeb8-f95531cafe8c-combined-ca-bundle\") pod \"8f5951c8-dfbf-4a11-aeb8-f95531cafe8c\" (UID: \"8f5951c8-dfbf-4a11-aeb8-f95531cafe8c\") " Dec 10 13:16:10 crc kubenswrapper[4921]: I1210 13:16:10.131571 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8f5951c8-dfbf-4a11-aeb8-f95531cafe8c-config-data\") pod \"8f5951c8-dfbf-4a11-aeb8-f95531cafe8c\" (UID: \"8f5951c8-dfbf-4a11-aeb8-f95531cafe8c\") " Dec 10 13:16:10 crc kubenswrapper[4921]: I1210 13:16:10.131605 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q2bxk\" (UniqueName: \"kubernetes.io/projected/8f5951c8-dfbf-4a11-aeb8-f95531cafe8c-kube-api-access-q2bxk\") pod \"8f5951c8-dfbf-4a11-aeb8-f95531cafe8c\" (UID: \"8f5951c8-dfbf-4a11-aeb8-f95531cafe8c\") " Dec 10 13:16:10 crc kubenswrapper[4921]: I1210 13:16:10.131634 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8f5951c8-dfbf-4a11-aeb8-f95531cafe8c-scripts\") pod \"8f5951c8-dfbf-4a11-aeb8-f95531cafe8c\" (UID: \"8f5951c8-dfbf-4a11-aeb8-f95531cafe8c\") " Dec 10 13:16:10 crc kubenswrapper[4921]: I1210 13:16:10.132062 4921 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e5bcffc0-2552-4a2d-8fd0-ccfe997bf989-config\") on node \"crc\" DevicePath \"\"" Dec 10 13:16:10 crc kubenswrapper[4921]: I1210 13:16:10.132076 4921 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e5bcffc0-2552-4a2d-8fd0-ccfe997bf989-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 10 13:16:10 crc kubenswrapper[4921]: I1210 13:16:10.132084 4921 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e5bcffc0-2552-4a2d-8fd0-ccfe997bf989-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 10 13:16:10 crc kubenswrapper[4921]: I1210 13:16:10.136528 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f5951c8-dfbf-4a11-aeb8-f95531cafe8c-kube-api-access-q2bxk" (OuterVolumeSpecName: "kube-api-access-q2bxk") pod "8f5951c8-dfbf-4a11-aeb8-f95531cafe8c" (UID: "8f5951c8-dfbf-4a11-aeb8-f95531cafe8c"). InnerVolumeSpecName "kube-api-access-q2bxk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 13:16:10 crc kubenswrapper[4921]: I1210 13:16:10.136552 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f5951c8-dfbf-4a11-aeb8-f95531cafe8c-scripts" (OuterVolumeSpecName: "scripts") pod "8f5951c8-dfbf-4a11-aeb8-f95531cafe8c" (UID: "8f5951c8-dfbf-4a11-aeb8-f95531cafe8c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:16:10 crc kubenswrapper[4921]: I1210 13:16:10.157104 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f5951c8-dfbf-4a11-aeb8-f95531cafe8c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8f5951c8-dfbf-4a11-aeb8-f95531cafe8c" (UID: "8f5951c8-dfbf-4a11-aeb8-f95531cafe8c"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:16:10 crc kubenswrapper[4921]: I1210 13:16:10.160599 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f5951c8-dfbf-4a11-aeb8-f95531cafe8c-config-data" (OuterVolumeSpecName: "config-data") pod "8f5951c8-dfbf-4a11-aeb8-f95531cafe8c" (UID: "8f5951c8-dfbf-4a11-aeb8-f95531cafe8c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:16:10 crc kubenswrapper[4921]: I1210 13:16:10.233948 4921 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f5951c8-dfbf-4a11-aeb8-f95531cafe8c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 13:16:10 crc kubenswrapper[4921]: I1210 13:16:10.233984 4921 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8f5951c8-dfbf-4a11-aeb8-f95531cafe8c-config-data\") on node \"crc\" DevicePath \"\"" Dec 10 13:16:10 crc kubenswrapper[4921]: I1210 13:16:10.233993 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q2bxk\" (UniqueName: \"kubernetes.io/projected/8f5951c8-dfbf-4a11-aeb8-f95531cafe8c-kube-api-access-q2bxk\") on node \"crc\" DevicePath \"\"" Dec 10 13:16:10 crc kubenswrapper[4921]: I1210 13:16:10.234004 4921 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8f5951c8-dfbf-4a11-aeb8-f95531cafe8c-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 13:16:10 crc kubenswrapper[4921]: I1210 13:16:10.646313 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-69vnb" event={"ID":"8f5951c8-dfbf-4a11-aeb8-f95531cafe8c","Type":"ContainerDied","Data":"6b081810060afd190bba8d014048bb431a7825b7a6fd9959e19ce8f89a174c75"} Dec 10 13:16:10 crc kubenswrapper[4921]: I1210 13:16:10.646362 4921 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6b081810060afd190bba8d014048bb431a7825b7a6fd9959e19ce8f89a174c75" Dec 10 13:16:10 crc kubenswrapper[4921]: I1210 13:16:10.646333 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-69vnb" Dec 10 13:16:10 crc kubenswrapper[4921]: I1210 13:16:10.648743 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-58db5546cc-xxl5s" event={"ID":"e5bcffc0-2552-4a2d-8fd0-ccfe997bf989","Type":"ContainerDied","Data":"d54038fdfed41690962a7eefc18a0372bf56abe9a68d39cd320affbfe07aac5e"} Dec 10 13:16:10 crc kubenswrapper[4921]: I1210 13:16:10.651084 4921 scope.go:117] "RemoveContainer" containerID="e91d133c156ed0040ceda5bf5182460f29395de7ba613bc03c0b9cd9cc31a238" Dec 10 13:16:10 crc kubenswrapper[4921]: I1210 13:16:10.648839 4921 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-58db5546cc-xxl5s" Dec 10 13:16:10 crc kubenswrapper[4921]: I1210 13:16:10.691547 4921 scope.go:117] "RemoveContainer" containerID="1ef8bc30c0fc19ae8b9c9b15a8bf14f1b88f6e57b02329c58b4f5f18c783dcf1" Dec 10 13:16:10 crc kubenswrapper[4921]: I1210 13:16:10.704980 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-58db5546cc-xxl5s"] Dec 10 13:16:10 crc kubenswrapper[4921]: I1210 13:16:10.730225 4921 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-58db5546cc-xxl5s"] Dec 10 13:16:10 crc kubenswrapper[4921]: I1210 13:16:10.783250 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 10 13:16:10 crc kubenswrapper[4921]: I1210 13:16:10.784166 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="b4f67c57-1639-4769-bdc2-22991d1a145d" containerName="nova-api-api" containerID="cri-o://ef139bf18fe8ab29a003648fe312252d8ba39c9a126e96b6c1e10dc58e881f2b" gracePeriod=30 Dec 10 13:16:10 crc kubenswrapper[4921]: I1210 13:16:10.783824 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="b4f67c57-1639-4769-bdc2-22991d1a145d" containerName="nova-api-log" containerID="cri-o://eff45db28c9a739b654706c16f8b89dbb53a841dd084499d9e0ad849a5dbe970" gracePeriod=30 Dec 10 13:16:10 crc kubenswrapper[4921]: I1210 13:16:10.827188 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 10 13:16:10 crc kubenswrapper[4921]: I1210 13:16:10.857140 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 10 13:16:10 crc kubenswrapper[4921]: I1210 13:16:10.857351 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="54108cf1-2eb4-49ee-ad9e-1dd94cbd717f" containerName="nova-metadata-log" containerID="cri-o://2a4e85970c8699ceacce5ec857ea1af6797fcfd683a8ea240fe5a1476b974b87" gracePeriod=30 Dec 10 13:16:10 crc kubenswrapper[4921]: I1210 13:16:10.857766 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="54108cf1-2eb4-49ee-ad9e-1dd94cbd717f" containerName="nova-metadata-metadata" containerID="cri-o://3d7d99b1d78346ddd9aa7ea8b177930ba43387fc361a94125708544a4978373f" gracePeriod=30 Dec 10 13:16:11 crc kubenswrapper[4921]: I1210 13:16:11.167589 4921 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-svrtf" Dec 10 13:16:11 crc kubenswrapper[4921]: I1210 13:16:11.203329 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e5bcffc0-2552-4a2d-8fd0-ccfe997bf989" path="/var/lib/kubelet/pods/e5bcffc0-2552-4a2d-8fd0-ccfe997bf989/volumes" Dec 10 13:16:11 crc kubenswrapper[4921]: I1210 13:16:11.271032 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/38908ed6-c218-40db-8cec-eed17dbde6e4-config-data\") pod \"38908ed6-c218-40db-8cec-eed17dbde6e4\" (UID: \"38908ed6-c218-40db-8cec-eed17dbde6e4\") " Dec 10 13:16:11 crc kubenswrapper[4921]: I1210 13:16:11.271127 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lk9h2\" (UniqueName: \"kubernetes.io/projected/38908ed6-c218-40db-8cec-eed17dbde6e4-kube-api-access-lk9h2\") pod \"38908ed6-c218-40db-8cec-eed17dbde6e4\" (UID: \"38908ed6-c218-40db-8cec-eed17dbde6e4\") " Dec 10 13:16:11 crc kubenswrapper[4921]: I1210 13:16:11.271293 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/38908ed6-c218-40db-8cec-eed17dbde6e4-scripts\") pod \"38908ed6-c218-40db-8cec-eed17dbde6e4\" (UID: \"38908ed6-c218-40db-8cec-eed17dbde6e4\") " Dec 10 13:16:11 crc kubenswrapper[4921]: I1210 13:16:11.271343 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/38908ed6-c218-40db-8cec-eed17dbde6e4-combined-ca-bundle\") pod \"38908ed6-c218-40db-8cec-eed17dbde6e4\" (UID: \"38908ed6-c218-40db-8cec-eed17dbde6e4\") " Dec 10 13:16:11 crc kubenswrapper[4921]: I1210 13:16:11.286039 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/38908ed6-c218-40db-8cec-eed17dbde6e4-kube-api-access-lk9h2" (OuterVolumeSpecName: "kube-api-access-lk9h2") pod "38908ed6-c218-40db-8cec-eed17dbde6e4" (UID: "38908ed6-c218-40db-8cec-eed17dbde6e4"). InnerVolumeSpecName "kube-api-access-lk9h2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 13:16:11 crc kubenswrapper[4921]: I1210 13:16:11.291211 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/38908ed6-c218-40db-8cec-eed17dbde6e4-scripts" (OuterVolumeSpecName: "scripts") pod "38908ed6-c218-40db-8cec-eed17dbde6e4" (UID: "38908ed6-c218-40db-8cec-eed17dbde6e4"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:16:11 crc kubenswrapper[4921]: I1210 13:16:11.300598 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/38908ed6-c218-40db-8cec-eed17dbde6e4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "38908ed6-c218-40db-8cec-eed17dbde6e4" (UID: "38908ed6-c218-40db-8cec-eed17dbde6e4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:16:11 crc kubenswrapper[4921]: I1210 13:16:11.313558 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/38908ed6-c218-40db-8cec-eed17dbde6e4-config-data" (OuterVolumeSpecName: "config-data") pod "38908ed6-c218-40db-8cec-eed17dbde6e4" (UID: "38908ed6-c218-40db-8cec-eed17dbde6e4"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:16:11 crc kubenswrapper[4921]: I1210 13:16:11.374254 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lk9h2\" (UniqueName: \"kubernetes.io/projected/38908ed6-c218-40db-8cec-eed17dbde6e4-kube-api-access-lk9h2\") on node \"crc\" DevicePath \"\"" Dec 10 13:16:11 crc kubenswrapper[4921]: I1210 13:16:11.374289 4921 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/38908ed6-c218-40db-8cec-eed17dbde6e4-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 13:16:11 crc kubenswrapper[4921]: I1210 13:16:11.374302 4921 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/38908ed6-c218-40db-8cec-eed17dbde6e4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 13:16:11 crc kubenswrapper[4921]: I1210 13:16:11.374311 4921 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/38908ed6-c218-40db-8cec-eed17dbde6e4-config-data\") on node \"crc\" DevicePath \"\"" Dec 10 13:16:11 crc kubenswrapper[4921]: I1210 13:16:11.663555 4921 generic.go:334] "Generic (PLEG): container finished" podID="b4f67c57-1639-4769-bdc2-22991d1a145d" containerID="eff45db28c9a739b654706c16f8b89dbb53a841dd084499d9e0ad849a5dbe970" exitCode=143 Dec 10 13:16:11 crc kubenswrapper[4921]: I1210 13:16:11.663610 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b4f67c57-1639-4769-bdc2-22991d1a145d","Type":"ContainerDied","Data":"eff45db28c9a739b654706c16f8b89dbb53a841dd084499d9e0ad849a5dbe970"} Dec 10 13:16:11 crc kubenswrapper[4921]: I1210 13:16:11.669333 4921 generic.go:334] "Generic (PLEG): container finished" podID="54108cf1-2eb4-49ee-ad9e-1dd94cbd717f" containerID="3d7d99b1d78346ddd9aa7ea8b177930ba43387fc361a94125708544a4978373f" exitCode=0 Dec 10 13:16:11 crc kubenswrapper[4921]: I1210 13:16:11.669364 4921 generic.go:334] "Generic (PLEG): container finished" podID="54108cf1-2eb4-49ee-ad9e-1dd94cbd717f" containerID="2a4e85970c8699ceacce5ec857ea1af6797fcfd683a8ea240fe5a1476b974b87" exitCode=143 Dec 10 13:16:11 crc kubenswrapper[4921]: I1210 13:16:11.669483 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"54108cf1-2eb4-49ee-ad9e-1dd94cbd717f","Type":"ContainerDied","Data":"3d7d99b1d78346ddd9aa7ea8b177930ba43387fc361a94125708544a4978373f"} Dec 10 13:16:11 crc kubenswrapper[4921]: I1210 13:16:11.669514 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"54108cf1-2eb4-49ee-ad9e-1dd94cbd717f","Type":"ContainerDied","Data":"2a4e85970c8699ceacce5ec857ea1af6797fcfd683a8ea240fe5a1476b974b87"} Dec 10 13:16:11 crc kubenswrapper[4921]: I1210 13:16:11.675729 4921 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-svrtf" Dec 10 13:16:11 crc kubenswrapper[4921]: I1210 13:16:11.675731 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-svrtf" event={"ID":"38908ed6-c218-40db-8cec-eed17dbde6e4","Type":"ContainerDied","Data":"db24319c583bb3c13167095918d104b6a28288d7265619866a786b676208a68a"} Dec 10 13:16:11 crc kubenswrapper[4921]: I1210 13:16:11.675779 4921 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="db24319c583bb3c13167095918d104b6a28288d7265619866a786b676208a68a" Dec 10 13:16:11 crc kubenswrapper[4921]: I1210 13:16:11.756830 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 10 13:16:11 crc kubenswrapper[4921]: E1210 13:16:11.757245 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8f5951c8-dfbf-4a11-aeb8-f95531cafe8c" containerName="nova-manage" Dec 10 13:16:11 crc kubenswrapper[4921]: I1210 13:16:11.757263 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="8f5951c8-dfbf-4a11-aeb8-f95531cafe8c" containerName="nova-manage" Dec 10 13:16:11 crc kubenswrapper[4921]: E1210 13:16:11.757288 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="38908ed6-c218-40db-8cec-eed17dbde6e4" containerName="nova-cell1-conductor-db-sync" Dec 10 13:16:11 crc kubenswrapper[4921]: I1210 13:16:11.757296 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="38908ed6-c218-40db-8cec-eed17dbde6e4" containerName="nova-cell1-conductor-db-sync" Dec 10 13:16:11 crc kubenswrapper[4921]: E1210 13:16:11.757304 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e5bcffc0-2552-4a2d-8fd0-ccfe997bf989" containerName="dnsmasq-dns" Dec 10 13:16:11 crc kubenswrapper[4921]: I1210 13:16:11.757313 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="e5bcffc0-2552-4a2d-8fd0-ccfe997bf989" containerName="dnsmasq-dns" Dec 10 13:16:11 crc kubenswrapper[4921]: E1210 13:16:11.757330 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e5bcffc0-2552-4a2d-8fd0-ccfe997bf989" containerName="init" Dec 10 13:16:11 crc kubenswrapper[4921]: I1210 13:16:11.757336 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="e5bcffc0-2552-4a2d-8fd0-ccfe997bf989" containerName="init" Dec 10 13:16:11 crc kubenswrapper[4921]: I1210 13:16:11.757515 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="38908ed6-c218-40db-8cec-eed17dbde6e4" containerName="nova-cell1-conductor-db-sync" Dec 10 13:16:11 crc kubenswrapper[4921]: I1210 13:16:11.757528 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="8f5951c8-dfbf-4a11-aeb8-f95531cafe8c" containerName="nova-manage" Dec 10 13:16:11 crc kubenswrapper[4921]: I1210 13:16:11.757542 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="e5bcffc0-2552-4a2d-8fd0-ccfe997bf989" containerName="dnsmasq-dns" Dec 10 13:16:11 crc kubenswrapper[4921]: I1210 13:16:11.758115 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Dec 10 13:16:11 crc kubenswrapper[4921]: I1210 13:16:11.761905 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Dec 10 13:16:11 crc kubenswrapper[4921]: I1210 13:16:11.797694 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 10 13:16:11 crc kubenswrapper[4921]: I1210 13:16:11.891911 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/31ff4ab0-02dc-48d0-8e1b-d3fa83b903e6-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"31ff4ab0-02dc-48d0-8e1b-d3fa83b903e6\") " pod="openstack/nova-cell1-conductor-0" Dec 10 13:16:11 crc kubenswrapper[4921]: I1210 13:16:11.891950 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hnklx\" (UniqueName: \"kubernetes.io/projected/31ff4ab0-02dc-48d0-8e1b-d3fa83b903e6-kube-api-access-hnklx\") pod \"nova-cell1-conductor-0\" (UID: \"31ff4ab0-02dc-48d0-8e1b-d3fa83b903e6\") " pod="openstack/nova-cell1-conductor-0" Dec 10 13:16:11 crc kubenswrapper[4921]: I1210 13:16:11.891972 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/31ff4ab0-02dc-48d0-8e1b-d3fa83b903e6-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"31ff4ab0-02dc-48d0-8e1b-d3fa83b903e6\") " pod="openstack/nova-cell1-conductor-0" Dec 10 13:16:11 crc kubenswrapper[4921]: I1210 13:16:11.929711 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 10 13:16:11 crc kubenswrapper[4921]: I1210 13:16:11.929763 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 10 13:16:11 crc kubenswrapper[4921]: I1210 13:16:11.993655 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/31ff4ab0-02dc-48d0-8e1b-d3fa83b903e6-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"31ff4ab0-02dc-48d0-8e1b-d3fa83b903e6\") " pod="openstack/nova-cell1-conductor-0" Dec 10 13:16:11 crc kubenswrapper[4921]: I1210 13:16:11.993694 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hnklx\" (UniqueName: \"kubernetes.io/projected/31ff4ab0-02dc-48d0-8e1b-d3fa83b903e6-kube-api-access-hnklx\") pod \"nova-cell1-conductor-0\" (UID: \"31ff4ab0-02dc-48d0-8e1b-d3fa83b903e6\") " pod="openstack/nova-cell1-conductor-0" Dec 10 13:16:11 crc kubenswrapper[4921]: I1210 13:16:11.993719 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/31ff4ab0-02dc-48d0-8e1b-d3fa83b903e6-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"31ff4ab0-02dc-48d0-8e1b-d3fa83b903e6\") " pod="openstack/nova-cell1-conductor-0" Dec 10 13:16:11 crc kubenswrapper[4921]: I1210 13:16:11.998943 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/31ff4ab0-02dc-48d0-8e1b-d3fa83b903e6-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"31ff4ab0-02dc-48d0-8e1b-d3fa83b903e6\") " pod="openstack/nova-cell1-conductor-0" Dec 10 13:16:11 crc kubenswrapper[4921]: I1210 13:16:11.999323 4921 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/31ff4ab0-02dc-48d0-8e1b-d3fa83b903e6-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"31ff4ab0-02dc-48d0-8e1b-d3fa83b903e6\") " pod="openstack/nova-cell1-conductor-0" Dec 10 13:16:12 crc kubenswrapper[4921]: I1210 13:16:12.010927 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hnklx\" (UniqueName: \"kubernetes.io/projected/31ff4ab0-02dc-48d0-8e1b-d3fa83b903e6-kube-api-access-hnklx\") pod \"nova-cell1-conductor-0\" (UID: \"31ff4ab0-02dc-48d0-8e1b-d3fa83b903e6\") " pod="openstack/nova-cell1-conductor-0" Dec 10 13:16:12 crc kubenswrapper[4921]: I1210 13:16:12.080496 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Dec 10 13:16:12 crc kubenswrapper[4921]: I1210 13:16:12.095362 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 10 13:16:12 crc kubenswrapper[4921]: I1210 13:16:12.196734 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/54108cf1-2eb4-49ee-ad9e-1dd94cbd717f-logs\") pod \"54108cf1-2eb4-49ee-ad9e-1dd94cbd717f\" (UID: \"54108cf1-2eb4-49ee-ad9e-1dd94cbd717f\") " Dec 10 13:16:12 crc kubenswrapper[4921]: I1210 13:16:12.197207 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/54108cf1-2eb4-49ee-ad9e-1dd94cbd717f-nova-metadata-tls-certs\") pod \"54108cf1-2eb4-49ee-ad9e-1dd94cbd717f\" (UID: \"54108cf1-2eb4-49ee-ad9e-1dd94cbd717f\") " Dec 10 13:16:12 crc kubenswrapper[4921]: I1210 13:16:12.197252 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/54108cf1-2eb4-49ee-ad9e-1dd94cbd717f-config-data\") pod \"54108cf1-2eb4-49ee-ad9e-1dd94cbd717f\" (UID: \"54108cf1-2eb4-49ee-ad9e-1dd94cbd717f\") " Dec 10 13:16:12 crc kubenswrapper[4921]: I1210 13:16:12.197323 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54108cf1-2eb4-49ee-ad9e-1dd94cbd717f-combined-ca-bundle\") pod \"54108cf1-2eb4-49ee-ad9e-1dd94cbd717f\" (UID: \"54108cf1-2eb4-49ee-ad9e-1dd94cbd717f\") " Dec 10 13:16:12 crc kubenswrapper[4921]: I1210 13:16:12.197365 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-442cc\" (UniqueName: \"kubernetes.io/projected/54108cf1-2eb4-49ee-ad9e-1dd94cbd717f-kube-api-access-442cc\") pod \"54108cf1-2eb4-49ee-ad9e-1dd94cbd717f\" (UID: \"54108cf1-2eb4-49ee-ad9e-1dd94cbd717f\") " Dec 10 13:16:12 crc kubenswrapper[4921]: I1210 13:16:12.197784 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/54108cf1-2eb4-49ee-ad9e-1dd94cbd717f-logs" (OuterVolumeSpecName: "logs") pod "54108cf1-2eb4-49ee-ad9e-1dd94cbd717f" (UID: "54108cf1-2eb4-49ee-ad9e-1dd94cbd717f"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 13:16:12 crc kubenswrapper[4921]: I1210 13:16:12.197925 4921 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/54108cf1-2eb4-49ee-ad9e-1dd94cbd717f-logs\") on node \"crc\" DevicePath \"\"" Dec 10 13:16:12 crc kubenswrapper[4921]: I1210 13:16:12.204448 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/54108cf1-2eb4-49ee-ad9e-1dd94cbd717f-kube-api-access-442cc" (OuterVolumeSpecName: "kube-api-access-442cc") pod "54108cf1-2eb4-49ee-ad9e-1dd94cbd717f" (UID: "54108cf1-2eb4-49ee-ad9e-1dd94cbd717f"). InnerVolumeSpecName "kube-api-access-442cc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 13:16:12 crc kubenswrapper[4921]: I1210 13:16:12.241192 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/54108cf1-2eb4-49ee-ad9e-1dd94cbd717f-config-data" (OuterVolumeSpecName: "config-data") pod "54108cf1-2eb4-49ee-ad9e-1dd94cbd717f" (UID: "54108cf1-2eb4-49ee-ad9e-1dd94cbd717f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:16:12 crc kubenswrapper[4921]: I1210 13:16:12.280787 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/54108cf1-2eb4-49ee-ad9e-1dd94cbd717f-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "54108cf1-2eb4-49ee-ad9e-1dd94cbd717f" (UID: "54108cf1-2eb4-49ee-ad9e-1dd94cbd717f"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:16:12 crc kubenswrapper[4921]: I1210 13:16:12.290982 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/54108cf1-2eb4-49ee-ad9e-1dd94cbd717f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "54108cf1-2eb4-49ee-ad9e-1dd94cbd717f" (UID: "54108cf1-2eb4-49ee-ad9e-1dd94cbd717f"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:16:12 crc kubenswrapper[4921]: I1210 13:16:12.300044 4921 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/54108cf1-2eb4-49ee-ad9e-1dd94cbd717f-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 10 13:16:12 crc kubenswrapper[4921]: I1210 13:16:12.300458 4921 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/54108cf1-2eb4-49ee-ad9e-1dd94cbd717f-config-data\") on node \"crc\" DevicePath \"\"" Dec 10 13:16:12 crc kubenswrapper[4921]: I1210 13:16:12.301826 4921 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54108cf1-2eb4-49ee-ad9e-1dd94cbd717f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 13:16:12 crc kubenswrapper[4921]: I1210 13:16:12.301846 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-442cc\" (UniqueName: \"kubernetes.io/projected/54108cf1-2eb4-49ee-ad9e-1dd94cbd717f-kube-api-access-442cc\") on node \"crc\" DevicePath \"\"" Dec 10 13:16:12 crc kubenswrapper[4921]: I1210 13:16:12.607119 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 10 13:16:12 crc kubenswrapper[4921]: I1210 13:16:12.698786 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"54108cf1-2eb4-49ee-ad9e-1dd94cbd717f","Type":"ContainerDied","Data":"f4595ebc7a07649f700358890c8dabcbd8299e46f22ad14b5f7ae61844450ec8"} Dec 10 13:16:12 crc kubenswrapper[4921]: I1210 13:16:12.698821 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 10 13:16:12 crc kubenswrapper[4921]: I1210 13:16:12.698888 4921 scope.go:117] "RemoveContainer" containerID="3d7d99b1d78346ddd9aa7ea8b177930ba43387fc361a94125708544a4978373f" Dec 10 13:16:12 crc kubenswrapper[4921]: I1210 13:16:12.702742 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="96c81abc-1c0b-47f6-9af8-bbf58960255d" containerName="nova-scheduler-scheduler" containerID="cri-o://6d949f23252cd8ad1af98dcb6da2062ea02826c5c85748c031f9a5db8331772e" gracePeriod=30 Dec 10 13:16:12 crc kubenswrapper[4921]: I1210 13:16:12.703009 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"31ff4ab0-02dc-48d0-8e1b-d3fa83b903e6","Type":"ContainerStarted","Data":"ce17d5fd54f963fb59c3344412586633fdc2e56221037fcc2ce6c6da76166fc3"} Dec 10 13:16:12 crc kubenswrapper[4921]: I1210 13:16:12.736115 4921 scope.go:117] "RemoveContainer" containerID="2a4e85970c8699ceacce5ec857ea1af6797fcfd683a8ea240fe5a1476b974b87" Dec 10 13:16:12 crc kubenswrapper[4921]: I1210 13:16:12.760618 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 10 13:16:12 crc kubenswrapper[4921]: I1210 13:16:12.769744 4921 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 10 13:16:12 crc kubenswrapper[4921]: I1210 13:16:12.793707 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 10 13:16:12 crc kubenswrapper[4921]: E1210 13:16:12.794240 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54108cf1-2eb4-49ee-ad9e-1dd94cbd717f" containerName="nova-metadata-metadata" Dec 10 13:16:12 crc kubenswrapper[4921]: I1210 13:16:12.794273 4921 
state_mem.go:107] "Deleted CPUSet assignment" podUID="54108cf1-2eb4-49ee-ad9e-1dd94cbd717f" containerName="nova-metadata-metadata" Dec 10 13:16:12 crc kubenswrapper[4921]: E1210 13:16:12.794291 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54108cf1-2eb4-49ee-ad9e-1dd94cbd717f" containerName="nova-metadata-log" Dec 10 13:16:12 crc kubenswrapper[4921]: I1210 13:16:12.794305 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="54108cf1-2eb4-49ee-ad9e-1dd94cbd717f" containerName="nova-metadata-log" Dec 10 13:16:12 crc kubenswrapper[4921]: I1210 13:16:12.794654 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="54108cf1-2eb4-49ee-ad9e-1dd94cbd717f" containerName="nova-metadata-metadata" Dec 10 13:16:12 crc kubenswrapper[4921]: I1210 13:16:12.794706 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="54108cf1-2eb4-49ee-ad9e-1dd94cbd717f" containerName="nova-metadata-log" Dec 10 13:16:12 crc kubenswrapper[4921]: I1210 13:16:12.796227 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 10 13:16:12 crc kubenswrapper[4921]: I1210 13:16:12.800407 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 10 13:16:12 crc kubenswrapper[4921]: I1210 13:16:12.804691 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Dec 10 13:16:12 crc kubenswrapper[4921]: I1210 13:16:12.806155 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 10 13:16:12 crc kubenswrapper[4921]: I1210 13:16:12.817350 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/132528a7-80b2-4c2b-89aa-9ab13bd8741a-config-data\") pod \"nova-metadata-0\" (UID: \"132528a7-80b2-4c2b-89aa-9ab13bd8741a\") " pod="openstack/nova-metadata-0" Dec 10 13:16:12 crc kubenswrapper[4921]: I1210 13:16:12.817453 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/132528a7-80b2-4c2b-89aa-9ab13bd8741a-logs\") pod \"nova-metadata-0\" (UID: \"132528a7-80b2-4c2b-89aa-9ab13bd8741a\") " pod="openstack/nova-metadata-0" Dec 10 13:16:12 crc kubenswrapper[4921]: I1210 13:16:12.817484 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/132528a7-80b2-4c2b-89aa-9ab13bd8741a-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"132528a7-80b2-4c2b-89aa-9ab13bd8741a\") " pod="openstack/nova-metadata-0" Dec 10 13:16:12 crc kubenswrapper[4921]: I1210 13:16:12.817543 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/132528a7-80b2-4c2b-89aa-9ab13bd8741a-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"132528a7-80b2-4c2b-89aa-9ab13bd8741a\") " pod="openstack/nova-metadata-0" Dec 10 13:16:12 crc kubenswrapper[4921]: I1210 13:16:12.817575 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8ckfn\" (UniqueName: \"kubernetes.io/projected/132528a7-80b2-4c2b-89aa-9ab13bd8741a-kube-api-access-8ckfn\") pod \"nova-metadata-0\" (UID: \"132528a7-80b2-4c2b-89aa-9ab13bd8741a\") " pod="openstack/nova-metadata-0" Dec 10 13:16:12 crc 
kubenswrapper[4921]: I1210 13:16:12.919676 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/132528a7-80b2-4c2b-89aa-9ab13bd8741a-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"132528a7-80b2-4c2b-89aa-9ab13bd8741a\") " pod="openstack/nova-metadata-0" Dec 10 13:16:12 crc kubenswrapper[4921]: I1210 13:16:12.920305 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8ckfn\" (UniqueName: \"kubernetes.io/projected/132528a7-80b2-4c2b-89aa-9ab13bd8741a-kube-api-access-8ckfn\") pod \"nova-metadata-0\" (UID: \"132528a7-80b2-4c2b-89aa-9ab13bd8741a\") " pod="openstack/nova-metadata-0" Dec 10 13:16:12 crc kubenswrapper[4921]: I1210 13:16:12.920439 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/132528a7-80b2-4c2b-89aa-9ab13bd8741a-config-data\") pod \"nova-metadata-0\" (UID: \"132528a7-80b2-4c2b-89aa-9ab13bd8741a\") " pod="openstack/nova-metadata-0" Dec 10 13:16:12 crc kubenswrapper[4921]: I1210 13:16:12.920547 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/132528a7-80b2-4c2b-89aa-9ab13bd8741a-logs\") pod \"nova-metadata-0\" (UID: \"132528a7-80b2-4c2b-89aa-9ab13bd8741a\") " pod="openstack/nova-metadata-0" Dec 10 13:16:12 crc kubenswrapper[4921]: I1210 13:16:12.922682 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/132528a7-80b2-4c2b-89aa-9ab13bd8741a-logs\") pod \"nova-metadata-0\" (UID: \"132528a7-80b2-4c2b-89aa-9ab13bd8741a\") " pod="openstack/nova-metadata-0" Dec 10 13:16:12 crc kubenswrapper[4921]: I1210 13:16:12.922877 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/132528a7-80b2-4c2b-89aa-9ab13bd8741a-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"132528a7-80b2-4c2b-89aa-9ab13bd8741a\") " pod="openstack/nova-metadata-0" Dec 10 13:16:12 crc kubenswrapper[4921]: I1210 13:16:12.923724 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/132528a7-80b2-4c2b-89aa-9ab13bd8741a-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"132528a7-80b2-4c2b-89aa-9ab13bd8741a\") " pod="openstack/nova-metadata-0" Dec 10 13:16:12 crc kubenswrapper[4921]: I1210 13:16:12.926616 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/132528a7-80b2-4c2b-89aa-9ab13bd8741a-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"132528a7-80b2-4c2b-89aa-9ab13bd8741a\") " pod="openstack/nova-metadata-0" Dec 10 13:16:12 crc kubenswrapper[4921]: I1210 13:16:12.927763 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/132528a7-80b2-4c2b-89aa-9ab13bd8741a-config-data\") pod \"nova-metadata-0\" (UID: \"132528a7-80b2-4c2b-89aa-9ab13bd8741a\") " pod="openstack/nova-metadata-0" Dec 10 13:16:12 crc kubenswrapper[4921]: I1210 13:16:12.939001 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8ckfn\" (UniqueName: \"kubernetes.io/projected/132528a7-80b2-4c2b-89aa-9ab13bd8741a-kube-api-access-8ckfn\") pod \"nova-metadata-0\" (UID: \"132528a7-80b2-4c2b-89aa-9ab13bd8741a\") " 
pod="openstack/nova-metadata-0" Dec 10 13:16:13 crc kubenswrapper[4921]: I1210 13:16:13.128426 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 10 13:16:13 crc kubenswrapper[4921]: I1210 13:16:13.214556 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="54108cf1-2eb4-49ee-ad9e-1dd94cbd717f" path="/var/lib/kubelet/pods/54108cf1-2eb4-49ee-ad9e-1dd94cbd717f/volumes" Dec 10 13:16:13 crc kubenswrapper[4921]: I1210 13:16:13.604537 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Dec 10 13:16:13 crc kubenswrapper[4921]: W1210 13:16:13.629796 4921 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod132528a7_80b2_4c2b_89aa_9ab13bd8741a.slice/crio-810beed87634aa09a7d236f7dc5838f3ac91c49c895aee7df2c85666effbee8b WatchSource:0}: Error finding container 810beed87634aa09a7d236f7dc5838f3ac91c49c895aee7df2c85666effbee8b: Status 404 returned error can't find the container with id 810beed87634aa09a7d236f7dc5838f3ac91c49c895aee7df2c85666effbee8b Dec 10 13:16:13 crc kubenswrapper[4921]: I1210 13:16:13.674576 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 10 13:16:13 crc kubenswrapper[4921]: I1210 13:16:13.742253 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"31ff4ab0-02dc-48d0-8e1b-d3fa83b903e6","Type":"ContainerStarted","Data":"6ab29bbd11062ee449c4062aadce574b1f756f973a18c4571e029a58b37a0ef7"} Dec 10 13:16:13 crc kubenswrapper[4921]: I1210 13:16:13.742744 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Dec 10 13:16:13 crc kubenswrapper[4921]: I1210 13:16:13.749083 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"132528a7-80b2-4c2b-89aa-9ab13bd8741a","Type":"ContainerStarted","Data":"810beed87634aa09a7d236f7dc5838f3ac91c49c895aee7df2c85666effbee8b"} Dec 10 13:16:14 crc kubenswrapper[4921]: E1210 13:16:14.096615 4921 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="6d949f23252cd8ad1af98dcb6da2062ea02826c5c85748c031f9a5db8331772e" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 10 13:16:14 crc kubenswrapper[4921]: E1210 13:16:14.098839 4921 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="6d949f23252cd8ad1af98dcb6da2062ea02826c5c85748c031f9a5db8331772e" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 10 13:16:14 crc kubenswrapper[4921]: E1210 13:16:14.101216 4921 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="6d949f23252cd8ad1af98dcb6da2062ea02826c5c85748c031f9a5db8331772e" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 10 13:16:14 crc kubenswrapper[4921]: E1210 13:16:14.101338 4921 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" 
pod="openstack/nova-scheduler-0" podUID="96c81abc-1c0b-47f6-9af8-bbf58960255d" containerName="nova-scheduler-scheduler" Dec 10 13:16:14 crc kubenswrapper[4921]: I1210 13:16:14.766994 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"132528a7-80b2-4c2b-89aa-9ab13bd8741a","Type":"ContainerStarted","Data":"4977baddfd7bfc59078ee2e3e81fcedf383c27532d01418ad6dbf4d447a975ad"} Dec 10 13:16:14 crc kubenswrapper[4921]: I1210 13:16:14.767071 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"132528a7-80b2-4c2b-89aa-9ab13bd8741a","Type":"ContainerStarted","Data":"fd0a6c2e3f05482d03664d7bcd66273ee0229b50f8d78e43e87c6d5fd306fcff"} Dec 10 13:16:14 crc kubenswrapper[4921]: I1210 13:16:14.799046 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.799020887 podStartE2EDuration="2.799020887s" podCreationTimestamp="2025-12-10 13:16:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 13:16:14.788456993 +0000 UTC m=+1172.004678937" watchObservedRunningTime="2025-12-10 13:16:14.799020887 +0000 UTC m=+1172.015242811" Dec 10 13:16:14 crc kubenswrapper[4921]: I1210 13:16:14.801478 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=3.801466633 podStartE2EDuration="3.801466633s" podCreationTimestamp="2025-12-10 13:16:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 13:16:13.768838029 +0000 UTC m=+1170.985059953" watchObservedRunningTime="2025-12-10 13:16:14.801466633 +0000 UTC m=+1172.017688557" Dec 10 13:16:16 crc kubenswrapper[4921]: I1210 13:16:16.246600 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 10 13:16:16 crc kubenswrapper[4921]: I1210 13:16:16.247013 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="4f843ca5-c8e0-4c44-a626-3cb41c83bab3" containerName="kube-state-metrics" containerID="cri-o://d73e4e8f32e5154f17997f872f9655aaaccfd134bbb7b726acf098a2a36e35ba" gracePeriod=30 Dec 10 13:16:16 crc kubenswrapper[4921]: I1210 13:16:16.425145 4921 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 10 13:16:16 crc kubenswrapper[4921]: I1210 13:16:16.534033 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96c81abc-1c0b-47f6-9af8-bbf58960255d-combined-ca-bundle\") pod \"96c81abc-1c0b-47f6-9af8-bbf58960255d\" (UID: \"96c81abc-1c0b-47f6-9af8-bbf58960255d\") " Dec 10 13:16:16 crc kubenswrapper[4921]: I1210 13:16:16.534198 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/96c81abc-1c0b-47f6-9af8-bbf58960255d-config-data\") pod \"96c81abc-1c0b-47f6-9af8-bbf58960255d\" (UID: \"96c81abc-1c0b-47f6-9af8-bbf58960255d\") " Dec 10 13:16:16 crc kubenswrapper[4921]: I1210 13:16:16.534248 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cshbk\" (UniqueName: \"kubernetes.io/projected/96c81abc-1c0b-47f6-9af8-bbf58960255d-kube-api-access-cshbk\") pod \"96c81abc-1c0b-47f6-9af8-bbf58960255d\" (UID: \"96c81abc-1c0b-47f6-9af8-bbf58960255d\") " Dec 10 13:16:16 crc kubenswrapper[4921]: I1210 13:16:16.556610 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96c81abc-1c0b-47f6-9af8-bbf58960255d-kube-api-access-cshbk" (OuterVolumeSpecName: "kube-api-access-cshbk") pod "96c81abc-1c0b-47f6-9af8-bbf58960255d" (UID: "96c81abc-1c0b-47f6-9af8-bbf58960255d"). InnerVolumeSpecName "kube-api-access-cshbk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 13:16:16 crc kubenswrapper[4921]: I1210 13:16:16.581525 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96c81abc-1c0b-47f6-9af8-bbf58960255d-config-data" (OuterVolumeSpecName: "config-data") pod "96c81abc-1c0b-47f6-9af8-bbf58960255d" (UID: "96c81abc-1c0b-47f6-9af8-bbf58960255d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:16:16 crc kubenswrapper[4921]: I1210 13:16:16.600488 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96c81abc-1c0b-47f6-9af8-bbf58960255d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "96c81abc-1c0b-47f6-9af8-bbf58960255d" (UID: "96c81abc-1c0b-47f6-9af8-bbf58960255d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:16:16 crc kubenswrapper[4921]: I1210 13:16:16.631904 4921 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 10 13:16:16 crc kubenswrapper[4921]: I1210 13:16:16.638443 4921 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/96c81abc-1c0b-47f6-9af8-bbf58960255d-config-data\") on node \"crc\" DevicePath \"\"" Dec 10 13:16:16 crc kubenswrapper[4921]: I1210 13:16:16.638481 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cshbk\" (UniqueName: \"kubernetes.io/projected/96c81abc-1c0b-47f6-9af8-bbf58960255d-kube-api-access-cshbk\") on node \"crc\" DevicePath \"\"" Dec 10 13:16:16 crc kubenswrapper[4921]: I1210 13:16:16.638493 4921 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96c81abc-1c0b-47f6-9af8-bbf58960255d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 13:16:16 crc kubenswrapper[4921]: I1210 13:16:16.740304 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b4f67c57-1639-4769-bdc2-22991d1a145d-logs\") pod \"b4f67c57-1639-4769-bdc2-22991d1a145d\" (UID: \"b4f67c57-1639-4769-bdc2-22991d1a145d\") " Dec 10 13:16:16 crc kubenswrapper[4921]: I1210 13:16:16.740642 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-99rpl\" (UniqueName: \"kubernetes.io/projected/b4f67c57-1639-4769-bdc2-22991d1a145d-kube-api-access-99rpl\") pod \"b4f67c57-1639-4769-bdc2-22991d1a145d\" (UID: \"b4f67c57-1639-4769-bdc2-22991d1a145d\") " Dec 10 13:16:16 crc kubenswrapper[4921]: I1210 13:16:16.740851 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b4f67c57-1639-4769-bdc2-22991d1a145d-config-data\") pod \"b4f67c57-1639-4769-bdc2-22991d1a145d\" (UID: \"b4f67c57-1639-4769-bdc2-22991d1a145d\") " Dec 10 13:16:16 crc kubenswrapper[4921]: I1210 13:16:16.740882 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4f67c57-1639-4769-bdc2-22991d1a145d-combined-ca-bundle\") pod \"b4f67c57-1639-4769-bdc2-22991d1a145d\" (UID: \"b4f67c57-1639-4769-bdc2-22991d1a145d\") " Dec 10 13:16:16 crc kubenswrapper[4921]: I1210 13:16:16.741134 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b4f67c57-1639-4769-bdc2-22991d1a145d-logs" (OuterVolumeSpecName: "logs") pod "b4f67c57-1639-4769-bdc2-22991d1a145d" (UID: "b4f67c57-1639-4769-bdc2-22991d1a145d"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 13:16:16 crc kubenswrapper[4921]: I1210 13:16:16.742170 4921 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b4f67c57-1639-4769-bdc2-22991d1a145d-logs\") on node \"crc\" DevicePath \"\"" Dec 10 13:16:16 crc kubenswrapper[4921]: I1210 13:16:16.744594 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b4f67c57-1639-4769-bdc2-22991d1a145d-kube-api-access-99rpl" (OuterVolumeSpecName: "kube-api-access-99rpl") pod "b4f67c57-1639-4769-bdc2-22991d1a145d" (UID: "b4f67c57-1639-4769-bdc2-22991d1a145d"). InnerVolumeSpecName "kube-api-access-99rpl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 13:16:16 crc kubenswrapper[4921]: I1210 13:16:16.752750 4921 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 10 13:16:16 crc kubenswrapper[4921]: I1210 13:16:16.763640 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b4f67c57-1639-4769-bdc2-22991d1a145d-config-data" (OuterVolumeSpecName: "config-data") pod "b4f67c57-1639-4769-bdc2-22991d1a145d" (UID: "b4f67c57-1639-4769-bdc2-22991d1a145d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:16:16 crc kubenswrapper[4921]: I1210 13:16:16.785591 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b4f67c57-1639-4769-bdc2-22991d1a145d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b4f67c57-1639-4769-bdc2-22991d1a145d" (UID: "b4f67c57-1639-4769-bdc2-22991d1a145d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:16:16 crc kubenswrapper[4921]: I1210 13:16:16.806140 4921 generic.go:334] "Generic (PLEG): container finished" podID="b4f67c57-1639-4769-bdc2-22991d1a145d" containerID="ef139bf18fe8ab29a003648fe312252d8ba39c9a126e96b6c1e10dc58e881f2b" exitCode=0 Dec 10 13:16:16 crc kubenswrapper[4921]: I1210 13:16:16.806216 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b4f67c57-1639-4769-bdc2-22991d1a145d","Type":"ContainerDied","Data":"ef139bf18fe8ab29a003648fe312252d8ba39c9a126e96b6c1e10dc58e881f2b"} Dec 10 13:16:16 crc kubenswrapper[4921]: I1210 13:16:16.806226 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 10 13:16:16 crc kubenswrapper[4921]: I1210 13:16:16.806244 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b4f67c57-1639-4769-bdc2-22991d1a145d","Type":"ContainerDied","Data":"265bef2c516ad2db79265fd5a1043516c537ccf4d3bf32b4274fb88990fe27f4"} Dec 10 13:16:16 crc kubenswrapper[4921]: I1210 13:16:16.806260 4921 scope.go:117] "RemoveContainer" containerID="ef139bf18fe8ab29a003648fe312252d8ba39c9a126e96b6c1e10dc58e881f2b" Dec 10 13:16:16 crc kubenswrapper[4921]: I1210 13:16:16.810558 4921 generic.go:334] "Generic (PLEG): container finished" podID="4f843ca5-c8e0-4c44-a626-3cb41c83bab3" containerID="d73e4e8f32e5154f17997f872f9655aaaccfd134bbb7b726acf098a2a36e35ba" exitCode=2 Dec 10 13:16:16 crc kubenswrapper[4921]: I1210 13:16:16.810621 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"4f843ca5-c8e0-4c44-a626-3cb41c83bab3","Type":"ContainerDied","Data":"d73e4e8f32e5154f17997f872f9655aaaccfd134bbb7b726acf098a2a36e35ba"} Dec 10 13:16:16 crc kubenswrapper[4921]: I1210 13:16:16.810646 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"4f843ca5-c8e0-4c44-a626-3cb41c83bab3","Type":"ContainerDied","Data":"36cdb02f3799e4df826fd1642d79c27d8c34e649a3eb10fdf2f68a57b400df50"} Dec 10 13:16:16 crc kubenswrapper[4921]: I1210 13:16:16.810694 4921 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 10 13:16:16 crc kubenswrapper[4921]: I1210 13:16:16.817622 4921 generic.go:334] "Generic (PLEG): container finished" podID="96c81abc-1c0b-47f6-9af8-bbf58960255d" containerID="6d949f23252cd8ad1af98dcb6da2062ea02826c5c85748c031f9a5db8331772e" exitCode=0 Dec 10 13:16:16 crc kubenswrapper[4921]: I1210 13:16:16.817667 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"96c81abc-1c0b-47f6-9af8-bbf58960255d","Type":"ContainerDied","Data":"6d949f23252cd8ad1af98dcb6da2062ea02826c5c85748c031f9a5db8331772e"} Dec 10 13:16:16 crc kubenswrapper[4921]: I1210 13:16:16.817694 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"96c81abc-1c0b-47f6-9af8-bbf58960255d","Type":"ContainerDied","Data":"ea859c598fcb013f84a8240036ee8ede6dcb9049a8ab3d1e86044ce767982949"} Dec 10 13:16:16 crc kubenswrapper[4921]: I1210 13:16:16.817748 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 10 13:16:16 crc kubenswrapper[4921]: I1210 13:16:16.831797 4921 scope.go:117] "RemoveContainer" containerID="eff45db28c9a739b654706c16f8b89dbb53a841dd084499d9e0ad849a5dbe970" Dec 10 13:16:16 crc kubenswrapper[4921]: I1210 13:16:16.845038 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxmg9\" (UniqueName: \"kubernetes.io/projected/4f843ca5-c8e0-4c44-a626-3cb41c83bab3-kube-api-access-wxmg9\") pod \"4f843ca5-c8e0-4c44-a626-3cb41c83bab3\" (UID: \"4f843ca5-c8e0-4c44-a626-3cb41c83bab3\") " Dec 10 13:16:16 crc kubenswrapper[4921]: I1210 13:16:16.845730 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-99rpl\" (UniqueName: \"kubernetes.io/projected/b4f67c57-1639-4769-bdc2-22991d1a145d-kube-api-access-99rpl\") on node \"crc\" DevicePath \"\"" Dec 10 13:16:16 crc kubenswrapper[4921]: I1210 13:16:16.845755 4921 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b4f67c57-1639-4769-bdc2-22991d1a145d-config-data\") on node \"crc\" DevicePath \"\"" Dec 10 13:16:16 crc kubenswrapper[4921]: I1210 13:16:16.845766 4921 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4f67c57-1639-4769-bdc2-22991d1a145d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 13:16:16 crc kubenswrapper[4921]: I1210 13:16:16.857224 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4f843ca5-c8e0-4c44-a626-3cb41c83bab3-kube-api-access-wxmg9" (OuterVolumeSpecName: "kube-api-access-wxmg9") pod "4f843ca5-c8e0-4c44-a626-3cb41c83bab3" (UID: "4f843ca5-c8e0-4c44-a626-3cb41c83bab3"). InnerVolumeSpecName "kube-api-access-wxmg9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 13:16:16 crc kubenswrapper[4921]: I1210 13:16:16.867157 4921 scope.go:117] "RemoveContainer" containerID="ef139bf18fe8ab29a003648fe312252d8ba39c9a126e96b6c1e10dc58e881f2b" Dec 10 13:16:16 crc kubenswrapper[4921]: E1210 13:16:16.870194 4921 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ef139bf18fe8ab29a003648fe312252d8ba39c9a126e96b6c1e10dc58e881f2b\": container with ID starting with ef139bf18fe8ab29a003648fe312252d8ba39c9a126e96b6c1e10dc58e881f2b not found: ID does not exist" containerID="ef139bf18fe8ab29a003648fe312252d8ba39c9a126e96b6c1e10dc58e881f2b" Dec 10 13:16:16 crc kubenswrapper[4921]: I1210 13:16:16.870248 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ef139bf18fe8ab29a003648fe312252d8ba39c9a126e96b6c1e10dc58e881f2b"} err="failed to get container status \"ef139bf18fe8ab29a003648fe312252d8ba39c9a126e96b6c1e10dc58e881f2b\": rpc error: code = NotFound desc = could not find container \"ef139bf18fe8ab29a003648fe312252d8ba39c9a126e96b6c1e10dc58e881f2b\": container with ID starting with ef139bf18fe8ab29a003648fe312252d8ba39c9a126e96b6c1e10dc58e881f2b not found: ID does not exist" Dec 10 13:16:16 crc kubenswrapper[4921]: I1210 13:16:16.870285 4921 scope.go:117] "RemoveContainer" containerID="eff45db28c9a739b654706c16f8b89dbb53a841dd084499d9e0ad849a5dbe970" Dec 10 13:16:16 crc kubenswrapper[4921]: E1210 13:16:16.878871 4921 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eff45db28c9a739b654706c16f8b89dbb53a841dd084499d9e0ad849a5dbe970\": container with ID starting with eff45db28c9a739b654706c16f8b89dbb53a841dd084499d9e0ad849a5dbe970 not found: ID does not exist" containerID="eff45db28c9a739b654706c16f8b89dbb53a841dd084499d9e0ad849a5dbe970" Dec 10 13:16:16 crc kubenswrapper[4921]: I1210 13:16:16.878920 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eff45db28c9a739b654706c16f8b89dbb53a841dd084499d9e0ad849a5dbe970"} err="failed to get container status \"eff45db28c9a739b654706c16f8b89dbb53a841dd084499d9e0ad849a5dbe970\": rpc error: code = NotFound desc = could not find container \"eff45db28c9a739b654706c16f8b89dbb53a841dd084499d9e0ad849a5dbe970\": container with ID starting with eff45db28c9a739b654706c16f8b89dbb53a841dd084499d9e0ad849a5dbe970 not found: ID does not exist" Dec 10 13:16:16 crc kubenswrapper[4921]: I1210 13:16:16.878951 4921 scope.go:117] "RemoveContainer" containerID="d73e4e8f32e5154f17997f872f9655aaaccfd134bbb7b726acf098a2a36e35ba" Dec 10 13:16:16 crc kubenswrapper[4921]: I1210 13:16:16.883947 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 10 13:16:16 crc kubenswrapper[4921]: I1210 13:16:16.903721 4921 scope.go:117] "RemoveContainer" containerID="d73e4e8f32e5154f17997f872f9655aaaccfd134bbb7b726acf098a2a36e35ba" Dec 10 13:16:16 crc kubenswrapper[4921]: I1210 13:16:16.905017 4921 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 10 13:16:16 crc kubenswrapper[4921]: E1210 13:16:16.906919 4921 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d73e4e8f32e5154f17997f872f9655aaaccfd134bbb7b726acf098a2a36e35ba\": container with ID starting with d73e4e8f32e5154f17997f872f9655aaaccfd134bbb7b726acf098a2a36e35ba not found: ID does 
not exist" containerID="d73e4e8f32e5154f17997f872f9655aaaccfd134bbb7b726acf098a2a36e35ba" Dec 10 13:16:16 crc kubenswrapper[4921]: I1210 13:16:16.907048 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d73e4e8f32e5154f17997f872f9655aaaccfd134bbb7b726acf098a2a36e35ba"} err="failed to get container status \"d73e4e8f32e5154f17997f872f9655aaaccfd134bbb7b726acf098a2a36e35ba\": rpc error: code = NotFound desc = could not find container \"d73e4e8f32e5154f17997f872f9655aaaccfd134bbb7b726acf098a2a36e35ba\": container with ID starting with d73e4e8f32e5154f17997f872f9655aaaccfd134bbb7b726acf098a2a36e35ba not found: ID does not exist" Dec 10 13:16:16 crc kubenswrapper[4921]: I1210 13:16:16.907135 4921 scope.go:117] "RemoveContainer" containerID="6d949f23252cd8ad1af98dcb6da2062ea02826c5c85748c031f9a5db8331772e" Dec 10 13:16:16 crc kubenswrapper[4921]: I1210 13:16:16.920062 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 10 13:16:16 crc kubenswrapper[4921]: E1210 13:16:16.920407 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b4f67c57-1639-4769-bdc2-22991d1a145d" containerName="nova-api-log" Dec 10 13:16:16 crc kubenswrapper[4921]: I1210 13:16:16.920422 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="b4f67c57-1639-4769-bdc2-22991d1a145d" containerName="nova-api-log" Dec 10 13:16:16 crc kubenswrapper[4921]: E1210 13:16:16.920440 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="96c81abc-1c0b-47f6-9af8-bbf58960255d" containerName="nova-scheduler-scheduler" Dec 10 13:16:16 crc kubenswrapper[4921]: I1210 13:16:16.920447 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="96c81abc-1c0b-47f6-9af8-bbf58960255d" containerName="nova-scheduler-scheduler" Dec 10 13:16:16 crc kubenswrapper[4921]: E1210 13:16:16.920461 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b4f67c57-1639-4769-bdc2-22991d1a145d" containerName="nova-api-api" Dec 10 13:16:16 crc kubenswrapper[4921]: I1210 13:16:16.920466 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="b4f67c57-1639-4769-bdc2-22991d1a145d" containerName="nova-api-api" Dec 10 13:16:16 crc kubenswrapper[4921]: E1210 13:16:16.920474 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4f843ca5-c8e0-4c44-a626-3cb41c83bab3" containerName="kube-state-metrics" Dec 10 13:16:16 crc kubenswrapper[4921]: I1210 13:16:16.920480 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f843ca5-c8e0-4c44-a626-3cb41c83bab3" containerName="kube-state-metrics" Dec 10 13:16:16 crc kubenswrapper[4921]: I1210 13:16:16.920655 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="b4f67c57-1639-4769-bdc2-22991d1a145d" containerName="nova-api-log" Dec 10 13:16:16 crc kubenswrapper[4921]: I1210 13:16:16.920679 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="4f843ca5-c8e0-4c44-a626-3cb41c83bab3" containerName="kube-state-metrics" Dec 10 13:16:16 crc kubenswrapper[4921]: I1210 13:16:16.920690 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="b4f67c57-1639-4769-bdc2-22991d1a145d" containerName="nova-api-api" Dec 10 13:16:16 crc kubenswrapper[4921]: I1210 13:16:16.920700 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="96c81abc-1c0b-47f6-9af8-bbf58960255d" containerName="nova-scheduler-scheduler" Dec 10 13:16:16 crc kubenswrapper[4921]: I1210 13:16:16.921528 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 10 13:16:16 crc kubenswrapper[4921]: I1210 13:16:16.925221 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 10 13:16:16 crc kubenswrapper[4921]: I1210 13:16:16.935048 4921 scope.go:117] "RemoveContainer" containerID="6d949f23252cd8ad1af98dcb6da2062ea02826c5c85748c031f9a5db8331772e" Dec 10 13:16:16 crc kubenswrapper[4921]: E1210 13:16:16.936059 4921 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6d949f23252cd8ad1af98dcb6da2062ea02826c5c85748c031f9a5db8331772e\": container with ID starting with 6d949f23252cd8ad1af98dcb6da2062ea02826c5c85748c031f9a5db8331772e not found: ID does not exist" containerID="6d949f23252cd8ad1af98dcb6da2062ea02826c5c85748c031f9a5db8331772e" Dec 10 13:16:16 crc kubenswrapper[4921]: I1210 13:16:16.936186 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6d949f23252cd8ad1af98dcb6da2062ea02826c5c85748c031f9a5db8331772e"} err="failed to get container status \"6d949f23252cd8ad1af98dcb6da2062ea02826c5c85748c031f9a5db8331772e\": rpc error: code = NotFound desc = could not find container \"6d949f23252cd8ad1af98dcb6da2062ea02826c5c85748c031f9a5db8331772e\": container with ID starting with 6d949f23252cd8ad1af98dcb6da2062ea02826c5c85748c031f9a5db8331772e not found: ID does not exist" Dec 10 13:16:16 crc kubenswrapper[4921]: I1210 13:16:16.942064 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 10 13:16:16 crc kubenswrapper[4921]: I1210 13:16:16.946889 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxmg9\" (UniqueName: \"kubernetes.io/projected/4f843ca5-c8e0-4c44-a626-3cb41c83bab3-kube-api-access-wxmg9\") on node \"crc\" DevicePath \"\"" Dec 10 13:16:16 crc kubenswrapper[4921]: I1210 13:16:16.955817 4921 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Dec 10 13:16:16 crc kubenswrapper[4921]: I1210 13:16:16.963485 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 10 13:16:16 crc kubenswrapper[4921]: I1210 13:16:16.980571 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 10 13:16:16 crc kubenswrapper[4921]: I1210 13:16:16.983922 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 10 13:16:16 crc kubenswrapper[4921]: I1210 13:16:16.990959 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 10 13:16:16 crc kubenswrapper[4921]: I1210 13:16:16.991503 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 10 13:16:17 crc kubenswrapper[4921]: I1210 13:16:17.048314 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gptnd\" (UniqueName: \"kubernetes.io/projected/0d48014d-698e-458a-98fe-40dc5e6fa3ab-kube-api-access-gptnd\") pod \"nova-scheduler-0\" (UID: \"0d48014d-698e-458a-98fe-40dc5e6fa3ab\") " pod="openstack/nova-scheduler-0" Dec 10 13:16:17 crc kubenswrapper[4921]: I1210 13:16:17.048422 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a1201fcf-b0b1-4dd4-80aa-d935a7f64fa3-logs\") pod \"nova-api-0\" (UID: \"a1201fcf-b0b1-4dd4-80aa-d935a7f64fa3\") " pod="openstack/nova-api-0" Dec 10 13:16:17 crc kubenswrapper[4921]: I1210 13:16:17.048493 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8phld\" (UniqueName: \"kubernetes.io/projected/a1201fcf-b0b1-4dd4-80aa-d935a7f64fa3-kube-api-access-8phld\") pod \"nova-api-0\" (UID: \"a1201fcf-b0b1-4dd4-80aa-d935a7f64fa3\") " pod="openstack/nova-api-0" Dec 10 13:16:17 crc kubenswrapper[4921]: I1210 13:16:17.048563 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1201fcf-b0b1-4dd4-80aa-d935a7f64fa3-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"a1201fcf-b0b1-4dd4-80aa-d935a7f64fa3\") " pod="openstack/nova-api-0" Dec 10 13:16:17 crc kubenswrapper[4921]: I1210 13:16:17.048598 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d48014d-698e-458a-98fe-40dc5e6fa3ab-config-data\") pod \"nova-scheduler-0\" (UID: \"0d48014d-698e-458a-98fe-40dc5e6fa3ab\") " pod="openstack/nova-scheduler-0" Dec 10 13:16:17 crc kubenswrapper[4921]: I1210 13:16:17.048613 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d48014d-698e-458a-98fe-40dc5e6fa3ab-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"0d48014d-698e-458a-98fe-40dc5e6fa3ab\") " pod="openstack/nova-scheduler-0" Dec 10 13:16:17 crc kubenswrapper[4921]: I1210 13:16:17.048644 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a1201fcf-b0b1-4dd4-80aa-d935a7f64fa3-config-data\") pod \"nova-api-0\" (UID: \"a1201fcf-b0b1-4dd4-80aa-d935a7f64fa3\") " pod="openstack/nova-api-0" Dec 10 13:16:17 crc kubenswrapper[4921]: I1210 13:16:17.114285 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Dec 10 13:16:17 crc kubenswrapper[4921]: I1210 13:16:17.150194 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gptnd\" (UniqueName: \"kubernetes.io/projected/0d48014d-698e-458a-98fe-40dc5e6fa3ab-kube-api-access-gptnd\") pod \"nova-scheduler-0\" (UID: \"0d48014d-698e-458a-98fe-40dc5e6fa3ab\") " 
pod="openstack/nova-scheduler-0" Dec 10 13:16:17 crc kubenswrapper[4921]: I1210 13:16:17.150252 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a1201fcf-b0b1-4dd4-80aa-d935a7f64fa3-logs\") pod \"nova-api-0\" (UID: \"a1201fcf-b0b1-4dd4-80aa-d935a7f64fa3\") " pod="openstack/nova-api-0" Dec 10 13:16:17 crc kubenswrapper[4921]: I1210 13:16:17.150284 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8phld\" (UniqueName: \"kubernetes.io/projected/a1201fcf-b0b1-4dd4-80aa-d935a7f64fa3-kube-api-access-8phld\") pod \"nova-api-0\" (UID: \"a1201fcf-b0b1-4dd4-80aa-d935a7f64fa3\") " pod="openstack/nova-api-0" Dec 10 13:16:17 crc kubenswrapper[4921]: I1210 13:16:17.150335 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1201fcf-b0b1-4dd4-80aa-d935a7f64fa3-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"a1201fcf-b0b1-4dd4-80aa-d935a7f64fa3\") " pod="openstack/nova-api-0" Dec 10 13:16:17 crc kubenswrapper[4921]: I1210 13:16:17.150380 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d48014d-698e-458a-98fe-40dc5e6fa3ab-config-data\") pod \"nova-scheduler-0\" (UID: \"0d48014d-698e-458a-98fe-40dc5e6fa3ab\") " pod="openstack/nova-scheduler-0" Dec 10 13:16:17 crc kubenswrapper[4921]: I1210 13:16:17.150419 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d48014d-698e-458a-98fe-40dc5e6fa3ab-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"0d48014d-698e-458a-98fe-40dc5e6fa3ab\") " pod="openstack/nova-scheduler-0" Dec 10 13:16:17 crc kubenswrapper[4921]: I1210 13:16:17.150468 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a1201fcf-b0b1-4dd4-80aa-d935a7f64fa3-config-data\") pod \"nova-api-0\" (UID: \"a1201fcf-b0b1-4dd4-80aa-d935a7f64fa3\") " pod="openstack/nova-api-0" Dec 10 13:16:17 crc kubenswrapper[4921]: I1210 13:16:17.151354 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a1201fcf-b0b1-4dd4-80aa-d935a7f64fa3-logs\") pod \"nova-api-0\" (UID: \"a1201fcf-b0b1-4dd4-80aa-d935a7f64fa3\") " pod="openstack/nova-api-0" Dec 10 13:16:17 crc kubenswrapper[4921]: I1210 13:16:17.155625 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 10 13:16:17 crc kubenswrapper[4921]: I1210 13:16:17.165972 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d48014d-698e-458a-98fe-40dc5e6fa3ab-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"0d48014d-698e-458a-98fe-40dc5e6fa3ab\") " pod="openstack/nova-scheduler-0" Dec 10 13:16:17 crc kubenswrapper[4921]: I1210 13:16:17.169648 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a1201fcf-b0b1-4dd4-80aa-d935a7f64fa3-config-data\") pod \"nova-api-0\" (UID: \"a1201fcf-b0b1-4dd4-80aa-d935a7f64fa3\") " pod="openstack/nova-api-0" Dec 10 13:16:17 crc kubenswrapper[4921]: I1210 13:16:17.176407 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/0d48014d-698e-458a-98fe-40dc5e6fa3ab-config-data\") pod \"nova-scheduler-0\" (UID: \"0d48014d-698e-458a-98fe-40dc5e6fa3ab\") " pod="openstack/nova-scheduler-0" Dec 10 13:16:17 crc kubenswrapper[4921]: I1210 13:16:17.178039 4921 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 10 13:16:17 crc kubenswrapper[4921]: I1210 13:16:17.189491 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1201fcf-b0b1-4dd4-80aa-d935a7f64fa3-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"a1201fcf-b0b1-4dd4-80aa-d935a7f64fa3\") " pod="openstack/nova-api-0" Dec 10 13:16:17 crc kubenswrapper[4921]: I1210 13:16:17.198129 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8phld\" (UniqueName: \"kubernetes.io/projected/a1201fcf-b0b1-4dd4-80aa-d935a7f64fa3-kube-api-access-8phld\") pod \"nova-api-0\" (UID: \"a1201fcf-b0b1-4dd4-80aa-d935a7f64fa3\") " pod="openstack/nova-api-0" Dec 10 13:16:17 crc kubenswrapper[4921]: I1210 13:16:17.199949 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gptnd\" (UniqueName: \"kubernetes.io/projected/0d48014d-698e-458a-98fe-40dc5e6fa3ab-kube-api-access-gptnd\") pod \"nova-scheduler-0\" (UID: \"0d48014d-698e-458a-98fe-40dc5e6fa3ab\") " pod="openstack/nova-scheduler-0" Dec 10 13:16:17 crc kubenswrapper[4921]: I1210 13:16:17.220987 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4f843ca5-c8e0-4c44-a626-3cb41c83bab3" path="/var/lib/kubelet/pods/4f843ca5-c8e0-4c44-a626-3cb41c83bab3/volumes" Dec 10 13:16:17 crc kubenswrapper[4921]: I1210 13:16:17.221496 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96c81abc-1c0b-47f6-9af8-bbf58960255d" path="/var/lib/kubelet/pods/96c81abc-1c0b-47f6-9af8-bbf58960255d/volumes" Dec 10 13:16:17 crc kubenswrapper[4921]: I1210 13:16:17.221984 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b4f67c57-1639-4769-bdc2-22991d1a145d" path="/var/lib/kubelet/pods/b4f67c57-1639-4769-bdc2-22991d1a145d/volumes" Dec 10 13:16:17 crc kubenswrapper[4921]: I1210 13:16:17.222531 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Dec 10 13:16:17 crc kubenswrapper[4921]: I1210 13:16:17.223453 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 10 13:16:17 crc kubenswrapper[4921]: I1210 13:16:17.223561 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 10 13:16:17 crc kubenswrapper[4921]: I1210 13:16:17.226931 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"kube-state-metrics-tls-config" Dec 10 13:16:17 crc kubenswrapper[4921]: I1210 13:16:17.227089 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-kube-state-metrics-svc" Dec 10 13:16:17 crc kubenswrapper[4921]: I1210 13:16:17.246669 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 10 13:16:17 crc kubenswrapper[4921]: I1210 13:16:17.307836 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 10 13:16:17 crc kubenswrapper[4921]: I1210 13:16:17.360219 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nkx4l\" (UniqueName: \"kubernetes.io/projected/d263bef7-339e-4d10-b012-a81641e98b7d-kube-api-access-nkx4l\") pod \"kube-state-metrics-0\" (UID: \"d263bef7-339e-4d10-b012-a81641e98b7d\") " pod="openstack/kube-state-metrics-0" Dec 10 13:16:17 crc kubenswrapper[4921]: I1210 13:16:17.360253 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/d263bef7-339e-4d10-b012-a81641e98b7d-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"d263bef7-339e-4d10-b012-a81641e98b7d\") " pod="openstack/kube-state-metrics-0" Dec 10 13:16:17 crc kubenswrapper[4921]: I1210 13:16:17.360293 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d263bef7-339e-4d10-b012-a81641e98b7d-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"d263bef7-339e-4d10-b012-a81641e98b7d\") " pod="openstack/kube-state-metrics-0" Dec 10 13:16:17 crc kubenswrapper[4921]: I1210 13:16:17.360371 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/d263bef7-339e-4d10-b012-a81641e98b7d-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"d263bef7-339e-4d10-b012-a81641e98b7d\") " pod="openstack/kube-state-metrics-0" Dec 10 13:16:17 crc kubenswrapper[4921]: I1210 13:16:17.461585 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nkx4l\" (UniqueName: \"kubernetes.io/projected/d263bef7-339e-4d10-b012-a81641e98b7d-kube-api-access-nkx4l\") pod \"kube-state-metrics-0\" (UID: \"d263bef7-339e-4d10-b012-a81641e98b7d\") " pod="openstack/kube-state-metrics-0" Dec 10 13:16:17 crc kubenswrapper[4921]: I1210 13:16:17.461627 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/d263bef7-339e-4d10-b012-a81641e98b7d-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"d263bef7-339e-4d10-b012-a81641e98b7d\") " pod="openstack/kube-state-metrics-0" Dec 10 13:16:17 crc kubenswrapper[4921]: I1210 13:16:17.461689 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d263bef7-339e-4d10-b012-a81641e98b7d-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"d263bef7-339e-4d10-b012-a81641e98b7d\") " pod="openstack/kube-state-metrics-0" Dec 10 13:16:17 crc kubenswrapper[4921]: I1210 13:16:17.461797 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/d263bef7-339e-4d10-b012-a81641e98b7d-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"d263bef7-339e-4d10-b012-a81641e98b7d\") " pod="openstack/kube-state-metrics-0" Dec 10 13:16:17 crc kubenswrapper[4921]: I1210 13:16:17.465873 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/d263bef7-339e-4d10-b012-a81641e98b7d-kube-state-metrics-tls-config\") pod 
\"kube-state-metrics-0\" (UID: \"d263bef7-339e-4d10-b012-a81641e98b7d\") " pod="openstack/kube-state-metrics-0" Dec 10 13:16:17 crc kubenswrapper[4921]: I1210 13:16:17.466151 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d263bef7-339e-4d10-b012-a81641e98b7d-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"d263bef7-339e-4d10-b012-a81641e98b7d\") " pod="openstack/kube-state-metrics-0" Dec 10 13:16:17 crc kubenswrapper[4921]: I1210 13:16:17.466488 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/d263bef7-339e-4d10-b012-a81641e98b7d-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"d263bef7-339e-4d10-b012-a81641e98b7d\") " pod="openstack/kube-state-metrics-0" Dec 10 13:16:17 crc kubenswrapper[4921]: I1210 13:16:17.478605 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nkx4l\" (UniqueName: \"kubernetes.io/projected/d263bef7-339e-4d10-b012-a81641e98b7d-kube-api-access-nkx4l\") pod \"kube-state-metrics-0\" (UID: \"d263bef7-339e-4d10-b012-a81641e98b7d\") " pod="openstack/kube-state-metrics-0" Dec 10 13:16:17 crc kubenswrapper[4921]: I1210 13:16:17.619891 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 10 13:16:17 crc kubenswrapper[4921]: I1210 13:16:17.620153 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="0eea7b39-fcbc-416a-8f38-5bd99ba10a03" containerName="ceilometer-central-agent" containerID="cri-o://9b603613014ee6cee9be58a2770ef2ff69165aa482e2cc58c7c4aa9fa2c97330" gracePeriod=30 Dec 10 13:16:17 crc kubenswrapper[4921]: I1210 13:16:17.620263 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="0eea7b39-fcbc-416a-8f38-5bd99ba10a03" containerName="proxy-httpd" containerID="cri-o://64043fd9733e7fd1dbfc87a9c31cc917fa4ebcfed7a23593e131aefea422692b" gracePeriod=30 Dec 10 13:16:17 crc kubenswrapper[4921]: I1210 13:16:17.620304 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="0eea7b39-fcbc-416a-8f38-5bd99ba10a03" containerName="sg-core" containerID="cri-o://87e8bc55085d37341914a7cdcf0df8d269ee407ff60b627d01d842733942a368" gracePeriod=30 Dec 10 13:16:17 crc kubenswrapper[4921]: I1210 13:16:17.620335 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="0eea7b39-fcbc-416a-8f38-5bd99ba10a03" containerName="ceilometer-notification-agent" containerID="cri-o://437c15cc494b955bbc4d356c7e9d9812024237cbf10692e5f7e494e9807a10c6" gracePeriod=30 Dec 10 13:16:17 crc kubenswrapper[4921]: I1210 13:16:17.661222 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 10 13:16:17 crc kubenswrapper[4921]: I1210 13:16:17.741603 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 10 13:16:17 crc kubenswrapper[4921]: I1210 13:16:17.837501 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 10 13:16:17 crc kubenswrapper[4921]: I1210 13:16:17.857052 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"a1201fcf-b0b1-4dd4-80aa-d935a7f64fa3","Type":"ContainerStarted","Data":"5b1e52702f3b3ecd1f06901cb5b131334dd7100f89c1e899508d8ee95b76c9f3"} Dec 10 13:16:17 crc kubenswrapper[4921]: I1210 13:16:17.941625 4921 generic.go:334] "Generic (PLEG): container finished" podID="0eea7b39-fcbc-416a-8f38-5bd99ba10a03" containerID="87e8bc55085d37341914a7cdcf0df8d269ee407ff60b627d01d842733942a368" exitCode=2 Dec 10 13:16:17 crc kubenswrapper[4921]: I1210 13:16:17.941683 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0eea7b39-fcbc-416a-8f38-5bd99ba10a03","Type":"ContainerDied","Data":"87e8bc55085d37341914a7cdcf0df8d269ee407ff60b627d01d842733942a368"} Dec 10 13:16:18 crc kubenswrapper[4921]: I1210 13:16:18.129119 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 10 13:16:18 crc kubenswrapper[4921]: I1210 13:16:18.129453 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 10 13:16:18 crc kubenswrapper[4921]: I1210 13:16:18.144660 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 10 13:16:18 crc kubenswrapper[4921]: I1210 13:16:18.215559 4921 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 10 13:16:18 crc kubenswrapper[4921]: I1210 13:16:18.503300 4921 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 10 13:16:18 crc kubenswrapper[4921]: I1210 13:16:18.586955 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0eea7b39-fcbc-416a-8f38-5bd99ba10a03-config-data\") pod \"0eea7b39-fcbc-416a-8f38-5bd99ba10a03\" (UID: \"0eea7b39-fcbc-416a-8f38-5bd99ba10a03\") " Dec 10 13:16:18 crc kubenswrapper[4921]: I1210 13:16:18.587055 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0eea7b39-fcbc-416a-8f38-5bd99ba10a03-run-httpd\") pod \"0eea7b39-fcbc-416a-8f38-5bd99ba10a03\" (UID: \"0eea7b39-fcbc-416a-8f38-5bd99ba10a03\") " Dec 10 13:16:18 crc kubenswrapper[4921]: I1210 13:16:18.587131 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0eea7b39-fcbc-416a-8f38-5bd99ba10a03-combined-ca-bundle\") pod \"0eea7b39-fcbc-416a-8f38-5bd99ba10a03\" (UID: \"0eea7b39-fcbc-416a-8f38-5bd99ba10a03\") " Dec 10 13:16:18 crc kubenswrapper[4921]: I1210 13:16:18.587235 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-594x6\" (UniqueName: \"kubernetes.io/projected/0eea7b39-fcbc-416a-8f38-5bd99ba10a03-kube-api-access-594x6\") pod \"0eea7b39-fcbc-416a-8f38-5bd99ba10a03\" (UID: \"0eea7b39-fcbc-416a-8f38-5bd99ba10a03\") " Dec 10 13:16:18 crc kubenswrapper[4921]: I1210 13:16:18.587275 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0eea7b39-fcbc-416a-8f38-5bd99ba10a03-scripts\") pod \"0eea7b39-fcbc-416a-8f38-5bd99ba10a03\" (UID: \"0eea7b39-fcbc-416a-8f38-5bd99ba10a03\") " Dec 10 13:16:18 crc kubenswrapper[4921]: I1210 13:16:18.587336 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0eea7b39-fcbc-416a-8f38-5bd99ba10a03-log-httpd\") pod \"0eea7b39-fcbc-416a-8f38-5bd99ba10a03\" (UID: \"0eea7b39-fcbc-416a-8f38-5bd99ba10a03\") " Dec 10 13:16:18 crc kubenswrapper[4921]: I1210 13:16:18.587362 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0eea7b39-fcbc-416a-8f38-5bd99ba10a03-sg-core-conf-yaml\") pod \"0eea7b39-fcbc-416a-8f38-5bd99ba10a03\" (UID: \"0eea7b39-fcbc-416a-8f38-5bd99ba10a03\") " Dec 10 13:16:18 crc kubenswrapper[4921]: I1210 13:16:18.587987 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0eea7b39-fcbc-416a-8f38-5bd99ba10a03-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "0eea7b39-fcbc-416a-8f38-5bd99ba10a03" (UID: "0eea7b39-fcbc-416a-8f38-5bd99ba10a03"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 13:16:18 crc kubenswrapper[4921]: I1210 13:16:18.589132 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0eea7b39-fcbc-416a-8f38-5bd99ba10a03-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "0eea7b39-fcbc-416a-8f38-5bd99ba10a03" (UID: "0eea7b39-fcbc-416a-8f38-5bd99ba10a03"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 13:16:18 crc kubenswrapper[4921]: I1210 13:16:18.589932 4921 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0eea7b39-fcbc-416a-8f38-5bd99ba10a03-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 10 13:16:18 crc kubenswrapper[4921]: I1210 13:16:18.590152 4921 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0eea7b39-fcbc-416a-8f38-5bd99ba10a03-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 10 13:16:18 crc kubenswrapper[4921]: I1210 13:16:18.593811 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0eea7b39-fcbc-416a-8f38-5bd99ba10a03-kube-api-access-594x6" (OuterVolumeSpecName: "kube-api-access-594x6") pod "0eea7b39-fcbc-416a-8f38-5bd99ba10a03" (UID: "0eea7b39-fcbc-416a-8f38-5bd99ba10a03"). InnerVolumeSpecName "kube-api-access-594x6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 13:16:18 crc kubenswrapper[4921]: I1210 13:16:18.595594 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0eea7b39-fcbc-416a-8f38-5bd99ba10a03-scripts" (OuterVolumeSpecName: "scripts") pod "0eea7b39-fcbc-416a-8f38-5bd99ba10a03" (UID: "0eea7b39-fcbc-416a-8f38-5bd99ba10a03"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:16:18 crc kubenswrapper[4921]: I1210 13:16:18.685746 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0eea7b39-fcbc-416a-8f38-5bd99ba10a03-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "0eea7b39-fcbc-416a-8f38-5bd99ba10a03" (UID: "0eea7b39-fcbc-416a-8f38-5bd99ba10a03"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:16:18 crc kubenswrapper[4921]: I1210 13:16:18.692619 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-594x6\" (UniqueName: \"kubernetes.io/projected/0eea7b39-fcbc-416a-8f38-5bd99ba10a03-kube-api-access-594x6\") on node \"crc\" DevicePath \"\"" Dec 10 13:16:18 crc kubenswrapper[4921]: I1210 13:16:18.692645 4921 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0eea7b39-fcbc-416a-8f38-5bd99ba10a03-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 13:16:18 crc kubenswrapper[4921]: I1210 13:16:18.692656 4921 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0eea7b39-fcbc-416a-8f38-5bd99ba10a03-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 10 13:16:18 crc kubenswrapper[4921]: I1210 13:16:18.759682 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0eea7b39-fcbc-416a-8f38-5bd99ba10a03-config-data" (OuterVolumeSpecName: "config-data") pod "0eea7b39-fcbc-416a-8f38-5bd99ba10a03" (UID: "0eea7b39-fcbc-416a-8f38-5bd99ba10a03"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:16:18 crc kubenswrapper[4921]: I1210 13:16:18.771060 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0eea7b39-fcbc-416a-8f38-5bd99ba10a03-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0eea7b39-fcbc-416a-8f38-5bd99ba10a03" (UID: "0eea7b39-fcbc-416a-8f38-5bd99ba10a03"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:16:18 crc kubenswrapper[4921]: I1210 13:16:18.795230 4921 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0eea7b39-fcbc-416a-8f38-5bd99ba10a03-config-data\") on node \"crc\" DevicePath \"\"" Dec 10 13:16:18 crc kubenswrapper[4921]: I1210 13:16:18.795362 4921 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0eea7b39-fcbc-416a-8f38-5bd99ba10a03-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 13:16:18 crc kubenswrapper[4921]: I1210 13:16:18.962285 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"0d48014d-698e-458a-98fe-40dc5e6fa3ab","Type":"ContainerStarted","Data":"9da02d3a46a6b343cb8dde058ca6aa75e481651107cab320760c66e78447d909"} Dec 10 13:16:18 crc kubenswrapper[4921]: I1210 13:16:18.962411 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"0d48014d-698e-458a-98fe-40dc5e6fa3ab","Type":"ContainerStarted","Data":"e1d06bb3e9e6e5751cd33835ebfa5352938bc292c8fd3a92ef2454296253875b"} Dec 10 13:16:18 crc kubenswrapper[4921]: I1210 13:16:18.967662 4921 generic.go:334] "Generic (PLEG): container finished" podID="0eea7b39-fcbc-416a-8f38-5bd99ba10a03" containerID="64043fd9733e7fd1dbfc87a9c31cc917fa4ebcfed7a23593e131aefea422692b" exitCode=0 Dec 10 13:16:18 crc kubenswrapper[4921]: I1210 13:16:18.967690 4921 generic.go:334] "Generic (PLEG): container finished" podID="0eea7b39-fcbc-416a-8f38-5bd99ba10a03" containerID="437c15cc494b955bbc4d356c7e9d9812024237cbf10692e5f7e494e9807a10c6" exitCode=0 Dec 10 13:16:18 crc kubenswrapper[4921]: I1210 13:16:18.967698 4921 generic.go:334] "Generic (PLEG): container finished" podID="0eea7b39-fcbc-416a-8f38-5bd99ba10a03" containerID="9b603613014ee6cee9be58a2770ef2ff69165aa482e2cc58c7c4aa9fa2c97330" exitCode=0 Dec 10 13:16:18 crc kubenswrapper[4921]: I1210 13:16:18.967740 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0eea7b39-fcbc-416a-8f38-5bd99ba10a03","Type":"ContainerDied","Data":"64043fd9733e7fd1dbfc87a9c31cc917fa4ebcfed7a23593e131aefea422692b"} Dec 10 13:16:18 crc kubenswrapper[4921]: I1210 13:16:18.967765 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0eea7b39-fcbc-416a-8f38-5bd99ba10a03","Type":"ContainerDied","Data":"437c15cc494b955bbc4d356c7e9d9812024237cbf10692e5f7e494e9807a10c6"} Dec 10 13:16:18 crc kubenswrapper[4921]: I1210 13:16:18.967775 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0eea7b39-fcbc-416a-8f38-5bd99ba10a03","Type":"ContainerDied","Data":"9b603613014ee6cee9be58a2770ef2ff69165aa482e2cc58c7c4aa9fa2c97330"} Dec 10 13:16:18 crc kubenswrapper[4921]: I1210 13:16:18.967784 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0eea7b39-fcbc-416a-8f38-5bd99ba10a03","Type":"ContainerDied","Data":"64d30dd77afac4a57129603cada4ac74c859a4f6e72dce3c54f66c15b0b0b337"} Dec 10 13:16:18 crc kubenswrapper[4921]: I1210 13:16:18.967801 4921 scope.go:117] "RemoveContainer" containerID="64043fd9733e7fd1dbfc87a9c31cc917fa4ebcfed7a23593e131aefea422692b" Dec 10 13:16:18 crc kubenswrapper[4921]: I1210 13:16:18.967909 4921 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 10 13:16:18 crc kubenswrapper[4921]: I1210 13:16:18.980124 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"a1201fcf-b0b1-4dd4-80aa-d935a7f64fa3","Type":"ContainerStarted","Data":"6bb7262aeb0001a66dfe3ddc835ab13efabcdc6269da196ba86e27ee2f3a974d"} Dec 10 13:16:18 crc kubenswrapper[4921]: I1210 13:16:18.980327 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"a1201fcf-b0b1-4dd4-80aa-d935a7f64fa3","Type":"ContainerStarted","Data":"187447de8c9bf397ffbfb91caaf731f03af3b23218a8b28aa86987355f521c67"} Dec 10 13:16:18 crc kubenswrapper[4921]: I1210 13:16:18.985266 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"d263bef7-339e-4d10-b012-a81641e98b7d","Type":"ContainerStarted","Data":"4f086726c587130fcd2593490a6fe9129060e3c4362a932dd02b26d525c2f714"} Dec 10 13:16:18 crc kubenswrapper[4921]: I1210 13:16:18.985325 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"d263bef7-339e-4d10-b012-a81641e98b7d","Type":"ContainerStarted","Data":"27901463af12998428e475ddef09eedde7c19fcfed824055c4eac6fa3e0d5ef0"} Dec 10 13:16:18 crc kubenswrapper[4921]: I1210 13:16:18.985574 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Dec 10 13:16:18 crc kubenswrapper[4921]: I1210 13:16:18.986746 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.986730584 podStartE2EDuration="2.986730584s" podCreationTimestamp="2025-12-10 13:16:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 13:16:18.977536077 +0000 UTC m=+1176.193758011" watchObservedRunningTime="2025-12-10 13:16:18.986730584 +0000 UTC m=+1176.202952518" Dec 10 13:16:19 crc kubenswrapper[4921]: I1210 13:16:19.003874 4921 scope.go:117] "RemoveContainer" containerID="87e8bc55085d37341914a7cdcf0df8d269ee407ff60b627d01d842733942a368" Dec 10 13:16:19 crc kubenswrapper[4921]: I1210 13:16:19.025488 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=3.025468044 podStartE2EDuration="3.025468044s" podCreationTimestamp="2025-12-10 13:16:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 13:16:19.016312948 +0000 UTC m=+1176.232534882" watchObservedRunningTime="2025-12-10 13:16:19.025468044 +0000 UTC m=+1176.241689968" Dec 10 13:16:19 crc kubenswrapper[4921]: I1210 13:16:19.039049 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=1.655172766 podStartE2EDuration="2.039033588s" podCreationTimestamp="2025-12-10 13:16:17 +0000 UTC" firstStartedPulling="2025-12-10 13:16:18.215311211 +0000 UTC m=+1175.431533135" lastFinishedPulling="2025-12-10 13:16:18.599172033 +0000 UTC m=+1175.815393957" observedRunningTime="2025-12-10 13:16:19.036409507 +0000 UTC m=+1176.252631451" watchObservedRunningTime="2025-12-10 13:16:19.039033588 +0000 UTC m=+1176.255255512" Dec 10 13:16:19 crc kubenswrapper[4921]: I1210 13:16:19.039198 4921 scope.go:117] "RemoveContainer" containerID="437c15cc494b955bbc4d356c7e9d9812024237cbf10692e5f7e494e9807a10c6" Dec 10 13:16:19 crc 
kubenswrapper[4921]: I1210 13:16:19.088621 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 10 13:16:19 crc kubenswrapper[4921]: I1210 13:16:19.124368 4921 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 10 13:16:19 crc kubenswrapper[4921]: I1210 13:16:19.129232 4921 scope.go:117] "RemoveContainer" containerID="9b603613014ee6cee9be58a2770ef2ff69165aa482e2cc58c7c4aa9fa2c97330" Dec 10 13:16:19 crc kubenswrapper[4921]: I1210 13:16:19.157272 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 10 13:16:19 crc kubenswrapper[4921]: E1210 13:16:19.158235 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0eea7b39-fcbc-416a-8f38-5bd99ba10a03" containerName="proxy-httpd" Dec 10 13:16:19 crc kubenswrapper[4921]: I1210 13:16:19.158248 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="0eea7b39-fcbc-416a-8f38-5bd99ba10a03" containerName="proxy-httpd" Dec 10 13:16:19 crc kubenswrapper[4921]: E1210 13:16:19.158274 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0eea7b39-fcbc-416a-8f38-5bd99ba10a03" containerName="ceilometer-central-agent" Dec 10 13:16:19 crc kubenswrapper[4921]: I1210 13:16:19.158281 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="0eea7b39-fcbc-416a-8f38-5bd99ba10a03" containerName="ceilometer-central-agent" Dec 10 13:16:19 crc kubenswrapper[4921]: E1210 13:16:19.158294 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0eea7b39-fcbc-416a-8f38-5bd99ba10a03" containerName="ceilometer-notification-agent" Dec 10 13:16:19 crc kubenswrapper[4921]: I1210 13:16:19.158304 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="0eea7b39-fcbc-416a-8f38-5bd99ba10a03" containerName="ceilometer-notification-agent" Dec 10 13:16:19 crc kubenswrapper[4921]: E1210 13:16:19.158323 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0eea7b39-fcbc-416a-8f38-5bd99ba10a03" containerName="sg-core" Dec 10 13:16:19 crc kubenswrapper[4921]: I1210 13:16:19.158330 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="0eea7b39-fcbc-416a-8f38-5bd99ba10a03" containerName="sg-core" Dec 10 13:16:19 crc kubenswrapper[4921]: I1210 13:16:19.158654 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="0eea7b39-fcbc-416a-8f38-5bd99ba10a03" containerName="ceilometer-notification-agent" Dec 10 13:16:19 crc kubenswrapper[4921]: I1210 13:16:19.158684 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="0eea7b39-fcbc-416a-8f38-5bd99ba10a03" containerName="sg-core" Dec 10 13:16:19 crc kubenswrapper[4921]: I1210 13:16:19.158709 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="0eea7b39-fcbc-416a-8f38-5bd99ba10a03" containerName="ceilometer-central-agent" Dec 10 13:16:19 crc kubenswrapper[4921]: I1210 13:16:19.158729 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="0eea7b39-fcbc-416a-8f38-5bd99ba10a03" containerName="proxy-httpd" Dec 10 13:16:19 crc kubenswrapper[4921]: I1210 13:16:19.166423 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 10 13:16:19 crc kubenswrapper[4921]: I1210 13:16:19.177639 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 10 13:16:19 crc kubenswrapper[4921]: I1210 13:16:19.191057 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Dec 10 13:16:19 crc kubenswrapper[4921]: I1210 13:16:19.206838 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 10 13:16:19 crc kubenswrapper[4921]: I1210 13:16:19.230252 4921 scope.go:117] "RemoveContainer" containerID="64043fd9733e7fd1dbfc87a9c31cc917fa4ebcfed7a23593e131aefea422692b" Dec 10 13:16:19 crc kubenswrapper[4921]: I1210 13:16:19.232005 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0eea7b39-fcbc-416a-8f38-5bd99ba10a03" path="/var/lib/kubelet/pods/0eea7b39-fcbc-416a-8f38-5bd99ba10a03/volumes" Dec 10 13:16:19 crc kubenswrapper[4921]: I1210 13:16:19.232869 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 10 13:16:19 crc kubenswrapper[4921]: E1210 13:16:19.237667 4921 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"64043fd9733e7fd1dbfc87a9c31cc917fa4ebcfed7a23593e131aefea422692b\": container with ID starting with 64043fd9733e7fd1dbfc87a9c31cc917fa4ebcfed7a23593e131aefea422692b not found: ID does not exist" containerID="64043fd9733e7fd1dbfc87a9c31cc917fa4ebcfed7a23593e131aefea422692b" Dec 10 13:16:19 crc kubenswrapper[4921]: I1210 13:16:19.237840 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"64043fd9733e7fd1dbfc87a9c31cc917fa4ebcfed7a23593e131aefea422692b"} err="failed to get container status \"64043fd9733e7fd1dbfc87a9c31cc917fa4ebcfed7a23593e131aefea422692b\": rpc error: code = NotFound desc = could not find container \"64043fd9733e7fd1dbfc87a9c31cc917fa4ebcfed7a23593e131aefea422692b\": container with ID starting with 64043fd9733e7fd1dbfc87a9c31cc917fa4ebcfed7a23593e131aefea422692b not found: ID does not exist" Dec 10 13:16:19 crc kubenswrapper[4921]: I1210 13:16:19.237941 4921 scope.go:117] "RemoveContainer" containerID="87e8bc55085d37341914a7cdcf0df8d269ee407ff60b627d01d842733942a368" Dec 10 13:16:19 crc kubenswrapper[4921]: E1210 13:16:19.238646 4921 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"87e8bc55085d37341914a7cdcf0df8d269ee407ff60b627d01d842733942a368\": container with ID starting with 87e8bc55085d37341914a7cdcf0df8d269ee407ff60b627d01d842733942a368 not found: ID does not exist" containerID="87e8bc55085d37341914a7cdcf0df8d269ee407ff60b627d01d842733942a368" Dec 10 13:16:19 crc kubenswrapper[4921]: I1210 13:16:19.238752 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"87e8bc55085d37341914a7cdcf0df8d269ee407ff60b627d01d842733942a368"} err="failed to get container status \"87e8bc55085d37341914a7cdcf0df8d269ee407ff60b627d01d842733942a368\": rpc error: code = NotFound desc = could not find container \"87e8bc55085d37341914a7cdcf0df8d269ee407ff60b627d01d842733942a368\": container with ID starting with 87e8bc55085d37341914a7cdcf0df8d269ee407ff60b627d01d842733942a368 not found: ID does not exist" Dec 10 13:16:19 crc kubenswrapper[4921]: I1210 13:16:19.238849 4921 scope.go:117] "RemoveContainer" 
containerID="437c15cc494b955bbc4d356c7e9d9812024237cbf10692e5f7e494e9807a10c6" Dec 10 13:16:19 crc kubenswrapper[4921]: E1210 13:16:19.243450 4921 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"437c15cc494b955bbc4d356c7e9d9812024237cbf10692e5f7e494e9807a10c6\": container with ID starting with 437c15cc494b955bbc4d356c7e9d9812024237cbf10692e5f7e494e9807a10c6 not found: ID does not exist" containerID="437c15cc494b955bbc4d356c7e9d9812024237cbf10692e5f7e494e9807a10c6" Dec 10 13:16:19 crc kubenswrapper[4921]: I1210 13:16:19.243500 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"437c15cc494b955bbc4d356c7e9d9812024237cbf10692e5f7e494e9807a10c6"} err="failed to get container status \"437c15cc494b955bbc4d356c7e9d9812024237cbf10692e5f7e494e9807a10c6\": rpc error: code = NotFound desc = could not find container \"437c15cc494b955bbc4d356c7e9d9812024237cbf10692e5f7e494e9807a10c6\": container with ID starting with 437c15cc494b955bbc4d356c7e9d9812024237cbf10692e5f7e494e9807a10c6 not found: ID does not exist" Dec 10 13:16:19 crc kubenswrapper[4921]: I1210 13:16:19.243533 4921 scope.go:117] "RemoveContainer" containerID="9b603613014ee6cee9be58a2770ef2ff69165aa482e2cc58c7c4aa9fa2c97330" Dec 10 13:16:19 crc kubenswrapper[4921]: E1210 13:16:19.244121 4921 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9b603613014ee6cee9be58a2770ef2ff69165aa482e2cc58c7c4aa9fa2c97330\": container with ID starting with 9b603613014ee6cee9be58a2770ef2ff69165aa482e2cc58c7c4aa9fa2c97330 not found: ID does not exist" containerID="9b603613014ee6cee9be58a2770ef2ff69165aa482e2cc58c7c4aa9fa2c97330" Dec 10 13:16:19 crc kubenswrapper[4921]: I1210 13:16:19.244191 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9b603613014ee6cee9be58a2770ef2ff69165aa482e2cc58c7c4aa9fa2c97330"} err="failed to get container status \"9b603613014ee6cee9be58a2770ef2ff69165aa482e2cc58c7c4aa9fa2c97330\": rpc error: code = NotFound desc = could not find container \"9b603613014ee6cee9be58a2770ef2ff69165aa482e2cc58c7c4aa9fa2c97330\": container with ID starting with 9b603613014ee6cee9be58a2770ef2ff69165aa482e2cc58c7c4aa9fa2c97330 not found: ID does not exist" Dec 10 13:16:19 crc kubenswrapper[4921]: I1210 13:16:19.244219 4921 scope.go:117] "RemoveContainer" containerID="64043fd9733e7fd1dbfc87a9c31cc917fa4ebcfed7a23593e131aefea422692b" Dec 10 13:16:19 crc kubenswrapper[4921]: I1210 13:16:19.253071 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"64043fd9733e7fd1dbfc87a9c31cc917fa4ebcfed7a23593e131aefea422692b"} err="failed to get container status \"64043fd9733e7fd1dbfc87a9c31cc917fa4ebcfed7a23593e131aefea422692b\": rpc error: code = NotFound desc = could not find container \"64043fd9733e7fd1dbfc87a9c31cc917fa4ebcfed7a23593e131aefea422692b\": container with ID starting with 64043fd9733e7fd1dbfc87a9c31cc917fa4ebcfed7a23593e131aefea422692b not found: ID does not exist" Dec 10 13:16:19 crc kubenswrapper[4921]: I1210 13:16:19.253121 4921 scope.go:117] "RemoveContainer" containerID="87e8bc55085d37341914a7cdcf0df8d269ee407ff60b627d01d842733942a368" Dec 10 13:16:19 crc kubenswrapper[4921]: I1210 13:16:19.256445 4921 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"87e8bc55085d37341914a7cdcf0df8d269ee407ff60b627d01d842733942a368"} err="failed to get container status \"87e8bc55085d37341914a7cdcf0df8d269ee407ff60b627d01d842733942a368\": rpc error: code = NotFound desc = could not find container \"87e8bc55085d37341914a7cdcf0df8d269ee407ff60b627d01d842733942a368\": container with ID starting with 87e8bc55085d37341914a7cdcf0df8d269ee407ff60b627d01d842733942a368 not found: ID does not exist" Dec 10 13:16:19 crc kubenswrapper[4921]: I1210 13:16:19.256490 4921 scope.go:117] "RemoveContainer" containerID="437c15cc494b955bbc4d356c7e9d9812024237cbf10692e5f7e494e9807a10c6" Dec 10 13:16:19 crc kubenswrapper[4921]: I1210 13:16:19.267559 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"437c15cc494b955bbc4d356c7e9d9812024237cbf10692e5f7e494e9807a10c6"} err="failed to get container status \"437c15cc494b955bbc4d356c7e9d9812024237cbf10692e5f7e494e9807a10c6\": rpc error: code = NotFound desc = could not find container \"437c15cc494b955bbc4d356c7e9d9812024237cbf10692e5f7e494e9807a10c6\": container with ID starting with 437c15cc494b955bbc4d356c7e9d9812024237cbf10692e5f7e494e9807a10c6 not found: ID does not exist" Dec 10 13:16:19 crc kubenswrapper[4921]: I1210 13:16:19.267602 4921 scope.go:117] "RemoveContainer" containerID="9b603613014ee6cee9be58a2770ef2ff69165aa482e2cc58c7c4aa9fa2c97330" Dec 10 13:16:19 crc kubenswrapper[4921]: I1210 13:16:19.276520 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9b603613014ee6cee9be58a2770ef2ff69165aa482e2cc58c7c4aa9fa2c97330"} err="failed to get container status \"9b603613014ee6cee9be58a2770ef2ff69165aa482e2cc58c7c4aa9fa2c97330\": rpc error: code = NotFound desc = could not find container \"9b603613014ee6cee9be58a2770ef2ff69165aa482e2cc58c7c4aa9fa2c97330\": container with ID starting with 9b603613014ee6cee9be58a2770ef2ff69165aa482e2cc58c7c4aa9fa2c97330 not found: ID does not exist" Dec 10 13:16:19 crc kubenswrapper[4921]: I1210 13:16:19.276558 4921 scope.go:117] "RemoveContainer" containerID="64043fd9733e7fd1dbfc87a9c31cc917fa4ebcfed7a23593e131aefea422692b" Dec 10 13:16:19 crc kubenswrapper[4921]: I1210 13:16:19.286525 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"64043fd9733e7fd1dbfc87a9c31cc917fa4ebcfed7a23593e131aefea422692b"} err="failed to get container status \"64043fd9733e7fd1dbfc87a9c31cc917fa4ebcfed7a23593e131aefea422692b\": rpc error: code = NotFound desc = could not find container \"64043fd9733e7fd1dbfc87a9c31cc917fa4ebcfed7a23593e131aefea422692b\": container with ID starting with 64043fd9733e7fd1dbfc87a9c31cc917fa4ebcfed7a23593e131aefea422692b not found: ID does not exist" Dec 10 13:16:19 crc kubenswrapper[4921]: I1210 13:16:19.286567 4921 scope.go:117] "RemoveContainer" containerID="87e8bc55085d37341914a7cdcf0df8d269ee407ff60b627d01d842733942a368" Dec 10 13:16:19 crc kubenswrapper[4921]: I1210 13:16:19.295542 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"87e8bc55085d37341914a7cdcf0df8d269ee407ff60b627d01d842733942a368"} err="failed to get container status \"87e8bc55085d37341914a7cdcf0df8d269ee407ff60b627d01d842733942a368\": rpc error: code = NotFound desc = could not find container \"87e8bc55085d37341914a7cdcf0df8d269ee407ff60b627d01d842733942a368\": container with ID starting with 87e8bc55085d37341914a7cdcf0df8d269ee407ff60b627d01d842733942a368 not found: ID does not exist" Dec 
10 13:16:19 crc kubenswrapper[4921]: I1210 13:16:19.295583 4921 scope.go:117] "RemoveContainer" containerID="437c15cc494b955bbc4d356c7e9d9812024237cbf10692e5f7e494e9807a10c6" Dec 10 13:16:19 crc kubenswrapper[4921]: I1210 13:16:19.299478 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"437c15cc494b955bbc4d356c7e9d9812024237cbf10692e5f7e494e9807a10c6"} err="failed to get container status \"437c15cc494b955bbc4d356c7e9d9812024237cbf10692e5f7e494e9807a10c6\": rpc error: code = NotFound desc = could not find container \"437c15cc494b955bbc4d356c7e9d9812024237cbf10692e5f7e494e9807a10c6\": container with ID starting with 437c15cc494b955bbc4d356c7e9d9812024237cbf10692e5f7e494e9807a10c6 not found: ID does not exist" Dec 10 13:16:19 crc kubenswrapper[4921]: I1210 13:16:19.299511 4921 scope.go:117] "RemoveContainer" containerID="9b603613014ee6cee9be58a2770ef2ff69165aa482e2cc58c7c4aa9fa2c97330" Dec 10 13:16:19 crc kubenswrapper[4921]: I1210 13:16:19.305600 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9b603613014ee6cee9be58a2770ef2ff69165aa482e2cc58c7c4aa9fa2c97330"} err="failed to get container status \"9b603613014ee6cee9be58a2770ef2ff69165aa482e2cc58c7c4aa9fa2c97330\": rpc error: code = NotFound desc = could not find container \"9b603613014ee6cee9be58a2770ef2ff69165aa482e2cc58c7c4aa9fa2c97330\": container with ID starting with 9b603613014ee6cee9be58a2770ef2ff69165aa482e2cc58c7c4aa9fa2c97330 not found: ID does not exist" Dec 10 13:16:19 crc kubenswrapper[4921]: I1210 13:16:19.308507 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/43dbe2d1-afbb-424c-976b-ea22edc24254-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"43dbe2d1-afbb-424c-976b-ea22edc24254\") " pod="openstack/ceilometer-0" Dec 10 13:16:19 crc kubenswrapper[4921]: I1210 13:16:19.308559 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/43dbe2d1-afbb-424c-976b-ea22edc24254-run-httpd\") pod \"ceilometer-0\" (UID: \"43dbe2d1-afbb-424c-976b-ea22edc24254\") " pod="openstack/ceilometer-0" Dec 10 13:16:19 crc kubenswrapper[4921]: I1210 13:16:19.308593 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/43dbe2d1-afbb-424c-976b-ea22edc24254-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"43dbe2d1-afbb-424c-976b-ea22edc24254\") " pod="openstack/ceilometer-0" Dec 10 13:16:19 crc kubenswrapper[4921]: I1210 13:16:19.308652 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/43dbe2d1-afbb-424c-976b-ea22edc24254-config-data\") pod \"ceilometer-0\" (UID: \"43dbe2d1-afbb-424c-976b-ea22edc24254\") " pod="openstack/ceilometer-0" Dec 10 13:16:19 crc kubenswrapper[4921]: I1210 13:16:19.308682 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/43dbe2d1-afbb-424c-976b-ea22edc24254-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"43dbe2d1-afbb-424c-976b-ea22edc24254\") " pod="openstack/ceilometer-0" Dec 10 13:16:19 crc kubenswrapper[4921]: I1210 13:16:19.308734 4921 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/43dbe2d1-afbb-424c-976b-ea22edc24254-log-httpd\") pod \"ceilometer-0\" (UID: \"43dbe2d1-afbb-424c-976b-ea22edc24254\") " pod="openstack/ceilometer-0" Dec 10 13:16:19 crc kubenswrapper[4921]: I1210 13:16:19.308798 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mjmwd\" (UniqueName: \"kubernetes.io/projected/43dbe2d1-afbb-424c-976b-ea22edc24254-kube-api-access-mjmwd\") pod \"ceilometer-0\" (UID: \"43dbe2d1-afbb-424c-976b-ea22edc24254\") " pod="openstack/ceilometer-0" Dec 10 13:16:19 crc kubenswrapper[4921]: I1210 13:16:19.308851 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/43dbe2d1-afbb-424c-976b-ea22edc24254-scripts\") pod \"ceilometer-0\" (UID: \"43dbe2d1-afbb-424c-976b-ea22edc24254\") " pod="openstack/ceilometer-0" Dec 10 13:16:19 crc kubenswrapper[4921]: I1210 13:16:19.410364 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/43dbe2d1-afbb-424c-976b-ea22edc24254-config-data\") pod \"ceilometer-0\" (UID: \"43dbe2d1-afbb-424c-976b-ea22edc24254\") " pod="openstack/ceilometer-0" Dec 10 13:16:19 crc kubenswrapper[4921]: I1210 13:16:19.410423 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/43dbe2d1-afbb-424c-976b-ea22edc24254-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"43dbe2d1-afbb-424c-976b-ea22edc24254\") " pod="openstack/ceilometer-0" Dec 10 13:16:19 crc kubenswrapper[4921]: I1210 13:16:19.410482 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/43dbe2d1-afbb-424c-976b-ea22edc24254-log-httpd\") pod \"ceilometer-0\" (UID: \"43dbe2d1-afbb-424c-976b-ea22edc24254\") " pod="openstack/ceilometer-0" Dec 10 13:16:19 crc kubenswrapper[4921]: I1210 13:16:19.410562 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mjmwd\" (UniqueName: \"kubernetes.io/projected/43dbe2d1-afbb-424c-976b-ea22edc24254-kube-api-access-mjmwd\") pod \"ceilometer-0\" (UID: \"43dbe2d1-afbb-424c-976b-ea22edc24254\") " pod="openstack/ceilometer-0" Dec 10 13:16:19 crc kubenswrapper[4921]: I1210 13:16:19.410613 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/43dbe2d1-afbb-424c-976b-ea22edc24254-scripts\") pod \"ceilometer-0\" (UID: \"43dbe2d1-afbb-424c-976b-ea22edc24254\") " pod="openstack/ceilometer-0" Dec 10 13:16:19 crc kubenswrapper[4921]: I1210 13:16:19.410653 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/43dbe2d1-afbb-424c-976b-ea22edc24254-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"43dbe2d1-afbb-424c-976b-ea22edc24254\") " pod="openstack/ceilometer-0" Dec 10 13:16:19 crc kubenswrapper[4921]: I1210 13:16:19.410674 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/43dbe2d1-afbb-424c-976b-ea22edc24254-run-httpd\") pod \"ceilometer-0\" (UID: \"43dbe2d1-afbb-424c-976b-ea22edc24254\") " pod="openstack/ceilometer-0" Dec 10 13:16:19 crc kubenswrapper[4921]: I1210 
13:16:19.410701 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/43dbe2d1-afbb-424c-976b-ea22edc24254-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"43dbe2d1-afbb-424c-976b-ea22edc24254\") " pod="openstack/ceilometer-0" Dec 10 13:16:19 crc kubenswrapper[4921]: I1210 13:16:19.411547 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/43dbe2d1-afbb-424c-976b-ea22edc24254-log-httpd\") pod \"ceilometer-0\" (UID: \"43dbe2d1-afbb-424c-976b-ea22edc24254\") " pod="openstack/ceilometer-0" Dec 10 13:16:19 crc kubenswrapper[4921]: I1210 13:16:19.411693 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/43dbe2d1-afbb-424c-976b-ea22edc24254-run-httpd\") pod \"ceilometer-0\" (UID: \"43dbe2d1-afbb-424c-976b-ea22edc24254\") " pod="openstack/ceilometer-0" Dec 10 13:16:19 crc kubenswrapper[4921]: I1210 13:16:19.422207 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/43dbe2d1-afbb-424c-976b-ea22edc24254-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"43dbe2d1-afbb-424c-976b-ea22edc24254\") " pod="openstack/ceilometer-0" Dec 10 13:16:19 crc kubenswrapper[4921]: I1210 13:16:19.426146 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/43dbe2d1-afbb-424c-976b-ea22edc24254-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"43dbe2d1-afbb-424c-976b-ea22edc24254\") " pod="openstack/ceilometer-0" Dec 10 13:16:19 crc kubenswrapper[4921]: I1210 13:16:19.431008 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/43dbe2d1-afbb-424c-976b-ea22edc24254-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"43dbe2d1-afbb-424c-976b-ea22edc24254\") " pod="openstack/ceilometer-0" Dec 10 13:16:19 crc kubenswrapper[4921]: I1210 13:16:19.431688 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/43dbe2d1-afbb-424c-976b-ea22edc24254-scripts\") pod \"ceilometer-0\" (UID: \"43dbe2d1-afbb-424c-976b-ea22edc24254\") " pod="openstack/ceilometer-0" Dec 10 13:16:19 crc kubenswrapper[4921]: I1210 13:16:19.437990 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/43dbe2d1-afbb-424c-976b-ea22edc24254-config-data\") pod \"ceilometer-0\" (UID: \"43dbe2d1-afbb-424c-976b-ea22edc24254\") " pod="openstack/ceilometer-0" Dec 10 13:16:19 crc kubenswrapper[4921]: I1210 13:16:19.439059 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mjmwd\" (UniqueName: \"kubernetes.io/projected/43dbe2d1-afbb-424c-976b-ea22edc24254-kube-api-access-mjmwd\") pod \"ceilometer-0\" (UID: \"43dbe2d1-afbb-424c-976b-ea22edc24254\") " pod="openstack/ceilometer-0" Dec 10 13:16:19 crc kubenswrapper[4921]: I1210 13:16:19.557742 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 10 13:16:20 crc kubenswrapper[4921]: I1210 13:16:20.059917 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 10 13:16:21 crc kubenswrapper[4921]: I1210 13:16:21.004055 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"43dbe2d1-afbb-424c-976b-ea22edc24254","Type":"ContainerStarted","Data":"cc6d84d3aca5786a4e97b847bb573cd6a403fc34a023488306367d1e6c979056"} Dec 10 13:16:21 crc kubenswrapper[4921]: I1210 13:16:21.004540 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"43dbe2d1-afbb-424c-976b-ea22edc24254","Type":"ContainerStarted","Data":"49403f40ab885867d5a466d200651b6f9b635c8eb5443f4fefe63ecb6b4605c6"} Dec 10 13:16:22 crc kubenswrapper[4921]: I1210 13:16:22.012818 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"43dbe2d1-afbb-424c-976b-ea22edc24254","Type":"ContainerStarted","Data":"6ad84247bd240cc7f9326975eb2c4d88ca55f97d7849039215384fc529efe342"} Dec 10 13:16:22 crc kubenswrapper[4921]: I1210 13:16:22.308880 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 10 13:16:23 crc kubenswrapper[4921]: I1210 13:16:23.038982 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"43dbe2d1-afbb-424c-976b-ea22edc24254","Type":"ContainerStarted","Data":"9b562bc480a5c1d5a68dea6e162b817f75751b709f2508abe5732383497290bc"} Dec 10 13:16:23 crc kubenswrapper[4921]: I1210 13:16:23.129078 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 10 13:16:23 crc kubenswrapper[4921]: I1210 13:16:23.129120 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 10 13:16:24 crc kubenswrapper[4921]: I1210 13:16:24.142565 4921 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="132528a7-80b2-4c2b-89aa-9ab13bd8741a" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.176:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 10 13:16:24 crc kubenswrapper[4921]: I1210 13:16:24.142576 4921 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="132528a7-80b2-4c2b-89aa-9ab13bd8741a" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.176:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 10 13:16:25 crc kubenswrapper[4921]: I1210 13:16:25.058831 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"43dbe2d1-afbb-424c-976b-ea22edc24254","Type":"ContainerStarted","Data":"b694e83a473a6e12f58d383137f5d96e7ced0e2523c33637a10c6e3c47781c9d"} Dec 10 13:16:25 crc kubenswrapper[4921]: I1210 13:16:25.059247 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 10 13:16:25 crc kubenswrapper[4921]: I1210 13:16:25.086766 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.594341298 podStartE2EDuration="6.086741811s" podCreationTimestamp="2025-12-10 13:16:19 +0000 UTC" firstStartedPulling="2025-12-10 13:16:20.051033197 +0000 UTC m=+1177.267255121" lastFinishedPulling="2025-12-10 13:16:24.54343367 +0000 UTC 
m=+1181.759655634" observedRunningTime="2025-12-10 13:16:25.076374893 +0000 UTC m=+1182.292596837" watchObservedRunningTime="2025-12-10 13:16:25.086741811 +0000 UTC m=+1182.302963735" Dec 10 13:16:27 crc kubenswrapper[4921]: I1210 13:16:27.248189 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 10 13:16:27 crc kubenswrapper[4921]: I1210 13:16:27.248517 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 10 13:16:27 crc kubenswrapper[4921]: I1210 13:16:27.308840 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Dec 10 13:16:27 crc kubenswrapper[4921]: I1210 13:16:27.337244 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Dec 10 13:16:27 crc kubenswrapper[4921]: I1210 13:16:27.678332 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Dec 10 13:16:28 crc kubenswrapper[4921]: I1210 13:16:28.127204 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Dec 10 13:16:28 crc kubenswrapper[4921]: I1210 13:16:28.329596 4921 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="a1201fcf-b0b1-4dd4-80aa-d935a7f64fa3" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.177:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 10 13:16:28 crc kubenswrapper[4921]: I1210 13:16:28.329682 4921 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="a1201fcf-b0b1-4dd4-80aa-d935a7f64fa3" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.177:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 10 13:16:33 crc kubenswrapper[4921]: I1210 13:16:33.136529 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 10 13:16:33 crc kubenswrapper[4921]: I1210 13:16:33.138762 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 10 13:16:33 crc kubenswrapper[4921]: I1210 13:16:33.142994 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 10 13:16:34 crc kubenswrapper[4921]: I1210 13:16:34.151592 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 10 13:16:34 crc kubenswrapper[4921]: I1210 13:16:34.992163 4921 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 10 13:16:35 crc kubenswrapper[4921]: I1210 13:16:35.102348 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/707274db-73e2-467c-9689-3712a3e404b6-config-data\") pod \"707274db-73e2-467c-9689-3712a3e404b6\" (UID: \"707274db-73e2-467c-9689-3712a3e404b6\") " Dec 10 13:16:35 crc kubenswrapper[4921]: I1210 13:16:35.102470 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/707274db-73e2-467c-9689-3712a3e404b6-combined-ca-bundle\") pod \"707274db-73e2-467c-9689-3712a3e404b6\" (UID: \"707274db-73e2-467c-9689-3712a3e404b6\") " Dec 10 13:16:35 crc kubenswrapper[4921]: I1210 13:16:35.102546 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b2ll7\" (UniqueName: \"kubernetes.io/projected/707274db-73e2-467c-9689-3712a3e404b6-kube-api-access-b2ll7\") pod \"707274db-73e2-467c-9689-3712a3e404b6\" (UID: \"707274db-73e2-467c-9689-3712a3e404b6\") " Dec 10 13:16:35 crc kubenswrapper[4921]: I1210 13:16:35.110979 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/707274db-73e2-467c-9689-3712a3e404b6-kube-api-access-b2ll7" (OuterVolumeSpecName: "kube-api-access-b2ll7") pod "707274db-73e2-467c-9689-3712a3e404b6" (UID: "707274db-73e2-467c-9689-3712a3e404b6"). InnerVolumeSpecName "kube-api-access-b2ll7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 13:16:35 crc kubenswrapper[4921]: I1210 13:16:35.132540 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/707274db-73e2-467c-9689-3712a3e404b6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "707274db-73e2-467c-9689-3712a3e404b6" (UID: "707274db-73e2-467c-9689-3712a3e404b6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:16:35 crc kubenswrapper[4921]: I1210 13:16:35.136491 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/707274db-73e2-467c-9689-3712a3e404b6-config-data" (OuterVolumeSpecName: "config-data") pod "707274db-73e2-467c-9689-3712a3e404b6" (UID: "707274db-73e2-467c-9689-3712a3e404b6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:16:35 crc kubenswrapper[4921]: I1210 13:16:35.153769 4921 generic.go:334] "Generic (PLEG): container finished" podID="707274db-73e2-467c-9689-3712a3e404b6" containerID="cdcb6c57506241626d8ec7b787d8455562f2c7c307507138de9d1aae584dd461" exitCode=137 Dec 10 13:16:35 crc kubenswrapper[4921]: I1210 13:16:35.154555 4921 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 10 13:16:35 crc kubenswrapper[4921]: I1210 13:16:35.154523 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"707274db-73e2-467c-9689-3712a3e404b6","Type":"ContainerDied","Data":"cdcb6c57506241626d8ec7b787d8455562f2c7c307507138de9d1aae584dd461"} Dec 10 13:16:35 crc kubenswrapper[4921]: I1210 13:16:35.154617 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"707274db-73e2-467c-9689-3712a3e404b6","Type":"ContainerDied","Data":"b6bb9831d8f240e302b40ffedf557256d32d2f33e04fdb095b550a1b2ebd9d2a"} Dec 10 13:16:35 crc kubenswrapper[4921]: I1210 13:16:35.154634 4921 scope.go:117] "RemoveContainer" containerID="cdcb6c57506241626d8ec7b787d8455562f2c7c307507138de9d1aae584dd461" Dec 10 13:16:35 crc kubenswrapper[4921]: I1210 13:16:35.228912 4921 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/707274db-73e2-467c-9689-3712a3e404b6-config-data\") on node \"crc\" DevicePath \"\"" Dec 10 13:16:35 crc kubenswrapper[4921]: I1210 13:16:35.228999 4921 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/707274db-73e2-467c-9689-3712a3e404b6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 13:16:35 crc kubenswrapper[4921]: I1210 13:16:35.229022 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b2ll7\" (UniqueName: \"kubernetes.io/projected/707274db-73e2-467c-9689-3712a3e404b6-kube-api-access-b2ll7\") on node \"crc\" DevicePath \"\"" Dec 10 13:16:35 crc kubenswrapper[4921]: I1210 13:16:35.247428 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 10 13:16:35 crc kubenswrapper[4921]: I1210 13:16:35.253010 4921 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 10 13:16:35 crc kubenswrapper[4921]: I1210 13:16:35.254489 4921 scope.go:117] "RemoveContainer" containerID="cdcb6c57506241626d8ec7b787d8455562f2c7c307507138de9d1aae584dd461" Dec 10 13:16:35 crc kubenswrapper[4921]: E1210 13:16:35.256096 4921 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cdcb6c57506241626d8ec7b787d8455562f2c7c307507138de9d1aae584dd461\": container with ID starting with cdcb6c57506241626d8ec7b787d8455562f2c7c307507138de9d1aae584dd461 not found: ID does not exist" containerID="cdcb6c57506241626d8ec7b787d8455562f2c7c307507138de9d1aae584dd461" Dec 10 13:16:35 crc kubenswrapper[4921]: I1210 13:16:35.256126 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cdcb6c57506241626d8ec7b787d8455562f2c7c307507138de9d1aae584dd461"} err="failed to get container status \"cdcb6c57506241626d8ec7b787d8455562f2c7c307507138de9d1aae584dd461\": rpc error: code = NotFound desc = could not find container \"cdcb6c57506241626d8ec7b787d8455562f2c7c307507138de9d1aae584dd461\": container with ID starting with cdcb6c57506241626d8ec7b787d8455562f2c7c307507138de9d1aae584dd461 not found: ID does not exist" Dec 10 13:16:35 crc kubenswrapper[4921]: I1210 13:16:35.261829 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 10 13:16:35 crc kubenswrapper[4921]: E1210 13:16:35.262217 4921 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="707274db-73e2-467c-9689-3712a3e404b6" containerName="nova-cell1-novncproxy-novncproxy" Dec 10 13:16:35 crc kubenswrapper[4921]: I1210 13:16:35.262236 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="707274db-73e2-467c-9689-3712a3e404b6" containerName="nova-cell1-novncproxy-novncproxy" Dec 10 13:16:35 crc kubenswrapper[4921]: I1210 13:16:35.262424 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="707274db-73e2-467c-9689-3712a3e404b6" containerName="nova-cell1-novncproxy-novncproxy" Dec 10 13:16:35 crc kubenswrapper[4921]: I1210 13:16:35.264098 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 10 13:16:35 crc kubenswrapper[4921]: I1210 13:16:35.275621 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 10 13:16:35 crc kubenswrapper[4921]: I1210 13:16:35.284217 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-public-svc" Dec 10 13:16:35 crc kubenswrapper[4921]: I1210 13:16:35.284404 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-vencrypt" Dec 10 13:16:35 crc kubenswrapper[4921]: I1210 13:16:35.284486 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Dec 10 13:16:35 crc kubenswrapper[4921]: I1210 13:16:35.330463 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/81d397da-3f88-490a-8125-01ddc0b3a196-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"81d397da-3f88-490a-8125-01ddc0b3a196\") " pod="openstack/nova-cell1-novncproxy-0" Dec 10 13:16:35 crc kubenswrapper[4921]: I1210 13:16:35.330779 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6vlvq\" (UniqueName: \"kubernetes.io/projected/81d397da-3f88-490a-8125-01ddc0b3a196-kube-api-access-6vlvq\") pod \"nova-cell1-novncproxy-0\" (UID: \"81d397da-3f88-490a-8125-01ddc0b3a196\") " pod="openstack/nova-cell1-novncproxy-0" Dec 10 13:16:35 crc kubenswrapper[4921]: I1210 13:16:35.330979 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/81d397da-3f88-490a-8125-01ddc0b3a196-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"81d397da-3f88-490a-8125-01ddc0b3a196\") " pod="openstack/nova-cell1-novncproxy-0" Dec 10 13:16:35 crc kubenswrapper[4921]: I1210 13:16:35.331031 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/81d397da-3f88-490a-8125-01ddc0b3a196-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"81d397da-3f88-490a-8125-01ddc0b3a196\") " pod="openstack/nova-cell1-novncproxy-0" Dec 10 13:16:35 crc kubenswrapper[4921]: I1210 13:16:35.331053 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/81d397da-3f88-490a-8125-01ddc0b3a196-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"81d397da-3f88-490a-8125-01ddc0b3a196\") " pod="openstack/nova-cell1-novncproxy-0" Dec 10 13:16:35 crc kubenswrapper[4921]: I1210 13:16:35.433326 4921 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/81d397da-3f88-490a-8125-01ddc0b3a196-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"81d397da-3f88-490a-8125-01ddc0b3a196\") " pod="openstack/nova-cell1-novncproxy-0" Dec 10 13:16:35 crc kubenswrapper[4921]: I1210 13:16:35.433432 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6vlvq\" (UniqueName: \"kubernetes.io/projected/81d397da-3f88-490a-8125-01ddc0b3a196-kube-api-access-6vlvq\") pod \"nova-cell1-novncproxy-0\" (UID: \"81d397da-3f88-490a-8125-01ddc0b3a196\") " pod="openstack/nova-cell1-novncproxy-0" Dec 10 13:16:35 crc kubenswrapper[4921]: I1210 13:16:35.433523 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/81d397da-3f88-490a-8125-01ddc0b3a196-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"81d397da-3f88-490a-8125-01ddc0b3a196\") " pod="openstack/nova-cell1-novncproxy-0" Dec 10 13:16:35 crc kubenswrapper[4921]: I1210 13:16:35.433572 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/81d397da-3f88-490a-8125-01ddc0b3a196-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"81d397da-3f88-490a-8125-01ddc0b3a196\") " pod="openstack/nova-cell1-novncproxy-0" Dec 10 13:16:35 crc kubenswrapper[4921]: I1210 13:16:35.433593 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/81d397da-3f88-490a-8125-01ddc0b3a196-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"81d397da-3f88-490a-8125-01ddc0b3a196\") " pod="openstack/nova-cell1-novncproxy-0" Dec 10 13:16:35 crc kubenswrapper[4921]: I1210 13:16:35.437971 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/81d397da-3f88-490a-8125-01ddc0b3a196-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"81d397da-3f88-490a-8125-01ddc0b3a196\") " pod="openstack/nova-cell1-novncproxy-0" Dec 10 13:16:35 crc kubenswrapper[4921]: I1210 13:16:35.439491 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/81d397da-3f88-490a-8125-01ddc0b3a196-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"81d397da-3f88-490a-8125-01ddc0b3a196\") " pod="openstack/nova-cell1-novncproxy-0" Dec 10 13:16:35 crc kubenswrapper[4921]: I1210 13:16:35.439719 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/81d397da-3f88-490a-8125-01ddc0b3a196-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"81d397da-3f88-490a-8125-01ddc0b3a196\") " pod="openstack/nova-cell1-novncproxy-0" Dec 10 13:16:35 crc kubenswrapper[4921]: I1210 13:16:35.439788 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/81d397da-3f88-490a-8125-01ddc0b3a196-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"81d397da-3f88-490a-8125-01ddc0b3a196\") " pod="openstack/nova-cell1-novncproxy-0" Dec 10 13:16:35 crc kubenswrapper[4921]: I1210 13:16:35.453987 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6vlvq\" (UniqueName: 
\"kubernetes.io/projected/81d397da-3f88-490a-8125-01ddc0b3a196-kube-api-access-6vlvq\") pod \"nova-cell1-novncproxy-0\" (UID: \"81d397da-3f88-490a-8125-01ddc0b3a196\") " pod="openstack/nova-cell1-novncproxy-0" Dec 10 13:16:35 crc kubenswrapper[4921]: I1210 13:16:35.600315 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 10 13:16:36 crc kubenswrapper[4921]: I1210 13:16:36.104884 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 10 13:16:36 crc kubenswrapper[4921]: I1210 13:16:36.167521 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"81d397da-3f88-490a-8125-01ddc0b3a196","Type":"ContainerStarted","Data":"86b2c10eebd4b8983bc0a3e82bbea0f0ac17c113642cf8a70c8af5811184a0bd"} Dec 10 13:16:37 crc kubenswrapper[4921]: I1210 13:16:37.182151 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"81d397da-3f88-490a-8125-01ddc0b3a196","Type":"ContainerStarted","Data":"14151dc491a078f92394759cd9de3d5018fb41defcf0e18b37d8f14be633212b"} Dec 10 13:16:37 crc kubenswrapper[4921]: I1210 13:16:37.209302 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="707274db-73e2-467c-9689-3712a3e404b6" path="/var/lib/kubelet/pods/707274db-73e2-467c-9689-3712a3e404b6/volumes" Dec 10 13:16:37 crc kubenswrapper[4921]: I1210 13:16:37.210821 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.210804708 podStartE2EDuration="2.210804708s" podCreationTimestamp="2025-12-10 13:16:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 13:16:37.205400553 +0000 UTC m=+1194.421622477" watchObservedRunningTime="2025-12-10 13:16:37.210804708 +0000 UTC m=+1194.427026632" Dec 10 13:16:37 crc kubenswrapper[4921]: I1210 13:16:37.249991 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 10 13:16:37 crc kubenswrapper[4921]: I1210 13:16:37.250978 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 10 13:16:37 crc kubenswrapper[4921]: I1210 13:16:37.252967 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 10 13:16:37 crc kubenswrapper[4921]: I1210 13:16:37.261233 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 10 13:16:38 crc kubenswrapper[4921]: I1210 13:16:38.194058 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 10 13:16:38 crc kubenswrapper[4921]: I1210 13:16:38.199588 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 10 13:16:38 crc kubenswrapper[4921]: I1210 13:16:38.444239 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-68d4b6d797-5tzmd"] Dec 10 13:16:38 crc kubenswrapper[4921]: I1210 13:16:38.448218 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-68d4b6d797-5tzmd" Dec 10 13:16:38 crc kubenswrapper[4921]: I1210 13:16:38.489147 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-68d4b6d797-5tzmd"] Dec 10 13:16:38 crc kubenswrapper[4921]: I1210 13:16:38.490119 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6af75187-fe5e-4c40-9ae4-ab5c70913328-ovsdbserver-nb\") pod \"dnsmasq-dns-68d4b6d797-5tzmd\" (UID: \"6af75187-fe5e-4c40-9ae4-ab5c70913328\") " pod="openstack/dnsmasq-dns-68d4b6d797-5tzmd" Dec 10 13:16:38 crc kubenswrapper[4921]: I1210 13:16:38.490190 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j74x8\" (UniqueName: \"kubernetes.io/projected/6af75187-fe5e-4c40-9ae4-ab5c70913328-kube-api-access-j74x8\") pod \"dnsmasq-dns-68d4b6d797-5tzmd\" (UID: \"6af75187-fe5e-4c40-9ae4-ab5c70913328\") " pod="openstack/dnsmasq-dns-68d4b6d797-5tzmd" Dec 10 13:16:38 crc kubenswrapper[4921]: I1210 13:16:38.490227 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6af75187-fe5e-4c40-9ae4-ab5c70913328-dns-svc\") pod \"dnsmasq-dns-68d4b6d797-5tzmd\" (UID: \"6af75187-fe5e-4c40-9ae4-ab5c70913328\") " pod="openstack/dnsmasq-dns-68d4b6d797-5tzmd" Dec 10 13:16:38 crc kubenswrapper[4921]: I1210 13:16:38.490312 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6af75187-fe5e-4c40-9ae4-ab5c70913328-config\") pod \"dnsmasq-dns-68d4b6d797-5tzmd\" (UID: \"6af75187-fe5e-4c40-9ae4-ab5c70913328\") " pod="openstack/dnsmasq-dns-68d4b6d797-5tzmd" Dec 10 13:16:38 crc kubenswrapper[4921]: I1210 13:16:38.490420 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6af75187-fe5e-4c40-9ae4-ab5c70913328-ovsdbserver-sb\") pod \"dnsmasq-dns-68d4b6d797-5tzmd\" (UID: \"6af75187-fe5e-4c40-9ae4-ab5c70913328\") " pod="openstack/dnsmasq-dns-68d4b6d797-5tzmd" Dec 10 13:16:38 crc kubenswrapper[4921]: I1210 13:16:38.591883 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6af75187-fe5e-4c40-9ae4-ab5c70913328-config\") pod \"dnsmasq-dns-68d4b6d797-5tzmd\" (UID: \"6af75187-fe5e-4c40-9ae4-ab5c70913328\") " pod="openstack/dnsmasq-dns-68d4b6d797-5tzmd" Dec 10 13:16:38 crc kubenswrapper[4921]: I1210 13:16:38.591962 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6af75187-fe5e-4c40-9ae4-ab5c70913328-ovsdbserver-sb\") pod \"dnsmasq-dns-68d4b6d797-5tzmd\" (UID: \"6af75187-fe5e-4c40-9ae4-ab5c70913328\") " pod="openstack/dnsmasq-dns-68d4b6d797-5tzmd" Dec 10 13:16:38 crc kubenswrapper[4921]: I1210 13:16:38.592014 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6af75187-fe5e-4c40-9ae4-ab5c70913328-ovsdbserver-nb\") pod \"dnsmasq-dns-68d4b6d797-5tzmd\" (UID: \"6af75187-fe5e-4c40-9ae4-ab5c70913328\") " pod="openstack/dnsmasq-dns-68d4b6d797-5tzmd" Dec 10 13:16:38 crc kubenswrapper[4921]: I1210 13:16:38.592048 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-j74x8\" (UniqueName: \"kubernetes.io/projected/6af75187-fe5e-4c40-9ae4-ab5c70913328-kube-api-access-j74x8\") pod \"dnsmasq-dns-68d4b6d797-5tzmd\" (UID: \"6af75187-fe5e-4c40-9ae4-ab5c70913328\") " pod="openstack/dnsmasq-dns-68d4b6d797-5tzmd" Dec 10 13:16:38 crc kubenswrapper[4921]: I1210 13:16:38.592072 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6af75187-fe5e-4c40-9ae4-ab5c70913328-dns-svc\") pod \"dnsmasq-dns-68d4b6d797-5tzmd\" (UID: \"6af75187-fe5e-4c40-9ae4-ab5c70913328\") " pod="openstack/dnsmasq-dns-68d4b6d797-5tzmd" Dec 10 13:16:38 crc kubenswrapper[4921]: I1210 13:16:38.592990 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6af75187-fe5e-4c40-9ae4-ab5c70913328-dns-svc\") pod \"dnsmasq-dns-68d4b6d797-5tzmd\" (UID: \"6af75187-fe5e-4c40-9ae4-ab5c70913328\") " pod="openstack/dnsmasq-dns-68d4b6d797-5tzmd" Dec 10 13:16:38 crc kubenswrapper[4921]: I1210 13:16:38.593043 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6af75187-fe5e-4c40-9ae4-ab5c70913328-config\") pod \"dnsmasq-dns-68d4b6d797-5tzmd\" (UID: \"6af75187-fe5e-4c40-9ae4-ab5c70913328\") " pod="openstack/dnsmasq-dns-68d4b6d797-5tzmd" Dec 10 13:16:38 crc kubenswrapper[4921]: I1210 13:16:38.593157 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6af75187-fe5e-4c40-9ae4-ab5c70913328-ovsdbserver-nb\") pod \"dnsmasq-dns-68d4b6d797-5tzmd\" (UID: \"6af75187-fe5e-4c40-9ae4-ab5c70913328\") " pod="openstack/dnsmasq-dns-68d4b6d797-5tzmd" Dec 10 13:16:38 crc kubenswrapper[4921]: I1210 13:16:38.593233 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6af75187-fe5e-4c40-9ae4-ab5c70913328-ovsdbserver-sb\") pod \"dnsmasq-dns-68d4b6d797-5tzmd\" (UID: \"6af75187-fe5e-4c40-9ae4-ab5c70913328\") " pod="openstack/dnsmasq-dns-68d4b6d797-5tzmd" Dec 10 13:16:38 crc kubenswrapper[4921]: I1210 13:16:38.615800 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j74x8\" (UniqueName: \"kubernetes.io/projected/6af75187-fe5e-4c40-9ae4-ab5c70913328-kube-api-access-j74x8\") pod \"dnsmasq-dns-68d4b6d797-5tzmd\" (UID: \"6af75187-fe5e-4c40-9ae4-ab5c70913328\") " pod="openstack/dnsmasq-dns-68d4b6d797-5tzmd" Dec 10 13:16:38 crc kubenswrapper[4921]: I1210 13:16:38.768841 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-68d4b6d797-5tzmd" Dec 10 13:16:39 crc kubenswrapper[4921]: I1210 13:16:39.276122 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-68d4b6d797-5tzmd"] Dec 10 13:16:39 crc kubenswrapper[4921]: W1210 13:16:39.284958 4921 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6af75187_fe5e_4c40_9ae4_ab5c70913328.slice/crio-7407ba46870baf686a3180099e43bea72de3f517e7990cc5b3adca8290662212 WatchSource:0}: Error finding container 7407ba46870baf686a3180099e43bea72de3f517e7990cc5b3adca8290662212: Status 404 returned error can't find the container with id 7407ba46870baf686a3180099e43bea72de3f517e7990cc5b3adca8290662212 Dec 10 13:16:40 crc kubenswrapper[4921]: I1210 13:16:40.231026 4921 generic.go:334] "Generic (PLEG): container finished" podID="6af75187-fe5e-4c40-9ae4-ab5c70913328" containerID="22f148e50263c88342f743bb6ac79442e9dab129600f0958944288807dae9aa4" exitCode=0 Dec 10 13:16:40 crc kubenswrapper[4921]: I1210 13:16:40.231112 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-68d4b6d797-5tzmd" event={"ID":"6af75187-fe5e-4c40-9ae4-ab5c70913328","Type":"ContainerDied","Data":"22f148e50263c88342f743bb6ac79442e9dab129600f0958944288807dae9aa4"} Dec 10 13:16:40 crc kubenswrapper[4921]: I1210 13:16:40.231575 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-68d4b6d797-5tzmd" event={"ID":"6af75187-fe5e-4c40-9ae4-ab5c70913328","Type":"ContainerStarted","Data":"7407ba46870baf686a3180099e43bea72de3f517e7990cc5b3adca8290662212"} Dec 10 13:16:40 crc kubenswrapper[4921]: I1210 13:16:40.600620 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Dec 10 13:16:41 crc kubenswrapper[4921]: I1210 13:16:41.240453 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-68d4b6d797-5tzmd" event={"ID":"6af75187-fe5e-4c40-9ae4-ab5c70913328","Type":"ContainerStarted","Data":"0e05c3e05f7e70b42d5b9dbed23ae0fa2abba889befb272d63fe15f221e2e64c"} Dec 10 13:16:41 crc kubenswrapper[4921]: I1210 13:16:41.240872 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-68d4b6d797-5tzmd" Dec 10 13:16:41 crc kubenswrapper[4921]: I1210 13:16:41.257711 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-68d4b6d797-5tzmd" podStartSLOduration=3.257690606 podStartE2EDuration="3.257690606s" podCreationTimestamp="2025-12-10 13:16:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 13:16:41.256260267 +0000 UTC m=+1198.472482211" watchObservedRunningTime="2025-12-10 13:16:41.257690606 +0000 UTC m=+1198.473912530" Dec 10 13:16:41 crc kubenswrapper[4921]: I1210 13:16:41.321046 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 10 13:16:41 crc kubenswrapper[4921]: I1210 13:16:41.321416 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="43dbe2d1-afbb-424c-976b-ea22edc24254" containerName="ceilometer-central-agent" containerID="cri-o://cc6d84d3aca5786a4e97b847bb573cd6a403fc34a023488306367d1e6c979056" gracePeriod=30 Dec 10 13:16:41 crc kubenswrapper[4921]: I1210 13:16:41.321440 4921 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openstack/ceilometer-0" podUID="43dbe2d1-afbb-424c-976b-ea22edc24254" containerName="proxy-httpd" containerID="cri-o://b694e83a473a6e12f58d383137f5d96e7ced0e2523c33637a10c6e3c47781c9d" gracePeriod=30 Dec 10 13:16:41 crc kubenswrapper[4921]: I1210 13:16:41.321454 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="43dbe2d1-afbb-424c-976b-ea22edc24254" containerName="sg-core" containerID="cri-o://9b562bc480a5c1d5a68dea6e162b817f75751b709f2508abe5732383497290bc" gracePeriod=30 Dec 10 13:16:41 crc kubenswrapper[4921]: I1210 13:16:41.321495 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="43dbe2d1-afbb-424c-976b-ea22edc24254" containerName="ceilometer-notification-agent" containerID="cri-o://6ad84247bd240cc7f9326975eb2c4d88ca55f97d7849039215384fc529efe342" gracePeriod=30 Dec 10 13:16:41 crc kubenswrapper[4921]: I1210 13:16:41.332587 4921 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="43dbe2d1-afbb-424c-976b-ea22edc24254" containerName="proxy-httpd" probeResult="failure" output="Get \"https://10.217.0.180:3000/\": EOF" Dec 10 13:16:41 crc kubenswrapper[4921]: I1210 13:16:41.624857 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 10 13:16:41 crc kubenswrapper[4921]: I1210 13:16:41.625059 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="a1201fcf-b0b1-4dd4-80aa-d935a7f64fa3" containerName="nova-api-log" containerID="cri-o://187447de8c9bf397ffbfb91caaf731f03af3b23218a8b28aa86987355f521c67" gracePeriod=30 Dec 10 13:16:41 crc kubenswrapper[4921]: I1210 13:16:41.625194 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="a1201fcf-b0b1-4dd4-80aa-d935a7f64fa3" containerName="nova-api-api" containerID="cri-o://6bb7262aeb0001a66dfe3ddc835ab13efabcdc6269da196ba86e27ee2f3a974d" gracePeriod=30 Dec 10 13:16:42 crc kubenswrapper[4921]: I1210 13:16:42.251402 4921 generic.go:334] "Generic (PLEG): container finished" podID="43dbe2d1-afbb-424c-976b-ea22edc24254" containerID="b694e83a473a6e12f58d383137f5d96e7ced0e2523c33637a10c6e3c47781c9d" exitCode=0 Dec 10 13:16:42 crc kubenswrapper[4921]: I1210 13:16:42.251728 4921 generic.go:334] "Generic (PLEG): container finished" podID="43dbe2d1-afbb-424c-976b-ea22edc24254" containerID="9b562bc480a5c1d5a68dea6e162b817f75751b709f2508abe5732383497290bc" exitCode=2 Dec 10 13:16:42 crc kubenswrapper[4921]: I1210 13:16:42.251738 4921 generic.go:334] "Generic (PLEG): container finished" podID="43dbe2d1-afbb-424c-976b-ea22edc24254" containerID="cc6d84d3aca5786a4e97b847bb573cd6a403fc34a023488306367d1e6c979056" exitCode=0 Dec 10 13:16:42 crc kubenswrapper[4921]: I1210 13:16:42.251779 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"43dbe2d1-afbb-424c-976b-ea22edc24254","Type":"ContainerDied","Data":"b694e83a473a6e12f58d383137f5d96e7ced0e2523c33637a10c6e3c47781c9d"} Dec 10 13:16:42 crc kubenswrapper[4921]: I1210 13:16:42.251807 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"43dbe2d1-afbb-424c-976b-ea22edc24254","Type":"ContainerDied","Data":"9b562bc480a5c1d5a68dea6e162b817f75751b709f2508abe5732383497290bc"} Dec 10 13:16:42 crc kubenswrapper[4921]: I1210 13:16:42.251816 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"43dbe2d1-afbb-424c-976b-ea22edc24254","Type":"ContainerDied","Data":"cc6d84d3aca5786a4e97b847bb573cd6a403fc34a023488306367d1e6c979056"} Dec 10 13:16:42 crc kubenswrapper[4921]: I1210 13:16:42.254043 4921 generic.go:334] "Generic (PLEG): container finished" podID="a1201fcf-b0b1-4dd4-80aa-d935a7f64fa3" containerID="187447de8c9bf397ffbfb91caaf731f03af3b23218a8b28aa86987355f521c67" exitCode=143 Dec 10 13:16:42 crc kubenswrapper[4921]: I1210 13:16:42.254845 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"a1201fcf-b0b1-4dd4-80aa-d935a7f64fa3","Type":"ContainerDied","Data":"187447de8c9bf397ffbfb91caaf731f03af3b23218a8b28aa86987355f521c67"} Dec 10 13:16:43 crc kubenswrapper[4921]: I1210 13:16:43.141517 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 10 13:16:43 crc kubenswrapper[4921]: I1210 13:16:43.186512 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/43dbe2d1-afbb-424c-976b-ea22edc24254-combined-ca-bundle\") pod \"43dbe2d1-afbb-424c-976b-ea22edc24254\" (UID: \"43dbe2d1-afbb-424c-976b-ea22edc24254\") " Dec 10 13:16:43 crc kubenswrapper[4921]: I1210 13:16:43.186610 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/43dbe2d1-afbb-424c-976b-ea22edc24254-log-httpd\") pod \"43dbe2d1-afbb-424c-976b-ea22edc24254\" (UID: \"43dbe2d1-afbb-424c-976b-ea22edc24254\") " Dec 10 13:16:43 crc kubenswrapper[4921]: I1210 13:16:43.186676 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mjmwd\" (UniqueName: \"kubernetes.io/projected/43dbe2d1-afbb-424c-976b-ea22edc24254-kube-api-access-mjmwd\") pod \"43dbe2d1-afbb-424c-976b-ea22edc24254\" (UID: \"43dbe2d1-afbb-424c-976b-ea22edc24254\") " Dec 10 13:16:43 crc kubenswrapper[4921]: I1210 13:16:43.186709 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/43dbe2d1-afbb-424c-976b-ea22edc24254-run-httpd\") pod \"43dbe2d1-afbb-424c-976b-ea22edc24254\" (UID: \"43dbe2d1-afbb-424c-976b-ea22edc24254\") " Dec 10 13:16:43 crc kubenswrapper[4921]: I1210 13:16:43.186761 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/43dbe2d1-afbb-424c-976b-ea22edc24254-sg-core-conf-yaml\") pod \"43dbe2d1-afbb-424c-976b-ea22edc24254\" (UID: \"43dbe2d1-afbb-424c-976b-ea22edc24254\") " Dec 10 13:16:43 crc kubenswrapper[4921]: I1210 13:16:43.186787 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/43dbe2d1-afbb-424c-976b-ea22edc24254-config-data\") pod \"43dbe2d1-afbb-424c-976b-ea22edc24254\" (UID: \"43dbe2d1-afbb-424c-976b-ea22edc24254\") " Dec 10 13:16:43 crc kubenswrapper[4921]: I1210 13:16:43.186813 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/43dbe2d1-afbb-424c-976b-ea22edc24254-ceilometer-tls-certs\") pod \"43dbe2d1-afbb-424c-976b-ea22edc24254\" (UID: \"43dbe2d1-afbb-424c-976b-ea22edc24254\") " Dec 10 13:16:43 crc kubenswrapper[4921]: I1210 13:16:43.186847 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/43dbe2d1-afbb-424c-976b-ea22edc24254-scripts\") pod \"43dbe2d1-afbb-424c-976b-ea22edc24254\" (UID: \"43dbe2d1-afbb-424c-976b-ea22edc24254\") " Dec 10 13:16:43 crc kubenswrapper[4921]: I1210 13:16:43.187439 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/43dbe2d1-afbb-424c-976b-ea22edc24254-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "43dbe2d1-afbb-424c-976b-ea22edc24254" (UID: "43dbe2d1-afbb-424c-976b-ea22edc24254"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 13:16:43 crc kubenswrapper[4921]: I1210 13:16:43.190056 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/43dbe2d1-afbb-424c-976b-ea22edc24254-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "43dbe2d1-afbb-424c-976b-ea22edc24254" (UID: "43dbe2d1-afbb-424c-976b-ea22edc24254"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 13:16:43 crc kubenswrapper[4921]: I1210 13:16:43.220639 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43dbe2d1-afbb-424c-976b-ea22edc24254-scripts" (OuterVolumeSpecName: "scripts") pod "43dbe2d1-afbb-424c-976b-ea22edc24254" (UID: "43dbe2d1-afbb-424c-976b-ea22edc24254"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:16:43 crc kubenswrapper[4921]: I1210 13:16:43.232035 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43dbe2d1-afbb-424c-976b-ea22edc24254-kube-api-access-mjmwd" (OuterVolumeSpecName: "kube-api-access-mjmwd") pod "43dbe2d1-afbb-424c-976b-ea22edc24254" (UID: "43dbe2d1-afbb-424c-976b-ea22edc24254"). InnerVolumeSpecName "kube-api-access-mjmwd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 13:16:43 crc kubenswrapper[4921]: I1210 13:16:43.271802 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43dbe2d1-afbb-424c-976b-ea22edc24254-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "43dbe2d1-afbb-424c-976b-ea22edc24254" (UID: "43dbe2d1-afbb-424c-976b-ea22edc24254"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:16:43 crc kubenswrapper[4921]: I1210 13:16:43.273649 4921 generic.go:334] "Generic (PLEG): container finished" podID="43dbe2d1-afbb-424c-976b-ea22edc24254" containerID="6ad84247bd240cc7f9326975eb2c4d88ca55f97d7849039215384fc529efe342" exitCode=0 Dec 10 13:16:43 crc kubenswrapper[4921]: I1210 13:16:43.273804 4921 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 10 13:16:43 crc kubenswrapper[4921]: I1210 13:16:43.289289 4921 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/43dbe2d1-afbb-424c-976b-ea22edc24254-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 10 13:16:43 crc kubenswrapper[4921]: I1210 13:16:43.289319 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mjmwd\" (UniqueName: \"kubernetes.io/projected/43dbe2d1-afbb-424c-976b-ea22edc24254-kube-api-access-mjmwd\") on node \"crc\" DevicePath \"\"" Dec 10 13:16:43 crc kubenswrapper[4921]: I1210 13:16:43.289335 4921 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/43dbe2d1-afbb-424c-976b-ea22edc24254-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 10 13:16:43 crc kubenswrapper[4921]: I1210 13:16:43.289346 4921 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/43dbe2d1-afbb-424c-976b-ea22edc24254-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 10 13:16:43 crc kubenswrapper[4921]: I1210 13:16:43.289357 4921 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/43dbe2d1-afbb-424c-976b-ea22edc24254-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 13:16:43 crc kubenswrapper[4921]: I1210 13:16:43.309744 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"43dbe2d1-afbb-424c-976b-ea22edc24254","Type":"ContainerDied","Data":"6ad84247bd240cc7f9326975eb2c4d88ca55f97d7849039215384fc529efe342"} Dec 10 13:16:43 crc kubenswrapper[4921]: I1210 13:16:43.309790 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"43dbe2d1-afbb-424c-976b-ea22edc24254","Type":"ContainerDied","Data":"49403f40ab885867d5a466d200651b6f9b635c8eb5443f4fefe63ecb6b4605c6"} Dec 10 13:16:43 crc kubenswrapper[4921]: I1210 13:16:43.309808 4921 scope.go:117] "RemoveContainer" containerID="b694e83a473a6e12f58d383137f5d96e7ced0e2523c33637a10c6e3c47781c9d" Dec 10 13:16:43 crc kubenswrapper[4921]: I1210 13:16:43.316501 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43dbe2d1-afbb-424c-976b-ea22edc24254-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "43dbe2d1-afbb-424c-976b-ea22edc24254" (UID: "43dbe2d1-afbb-424c-976b-ea22edc24254"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:16:43 crc kubenswrapper[4921]: I1210 13:16:43.328378 4921 scope.go:117] "RemoveContainer" containerID="9b562bc480a5c1d5a68dea6e162b817f75751b709f2508abe5732383497290bc" Dec 10 13:16:43 crc kubenswrapper[4921]: I1210 13:16:43.339451 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43dbe2d1-afbb-424c-976b-ea22edc24254-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "43dbe2d1-afbb-424c-976b-ea22edc24254" (UID: "43dbe2d1-afbb-424c-976b-ea22edc24254"). InnerVolumeSpecName "ceilometer-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:16:43 crc kubenswrapper[4921]: I1210 13:16:43.352599 4921 scope.go:117] "RemoveContainer" containerID="6ad84247bd240cc7f9326975eb2c4d88ca55f97d7849039215384fc529efe342" Dec 10 13:16:43 crc kubenswrapper[4921]: I1210 13:16:43.375607 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43dbe2d1-afbb-424c-976b-ea22edc24254-config-data" (OuterVolumeSpecName: "config-data") pod "43dbe2d1-afbb-424c-976b-ea22edc24254" (UID: "43dbe2d1-afbb-424c-976b-ea22edc24254"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:16:43 crc kubenswrapper[4921]: I1210 13:16:43.375798 4921 scope.go:117] "RemoveContainer" containerID="cc6d84d3aca5786a4e97b847bb573cd6a403fc34a023488306367d1e6c979056" Dec 10 13:16:43 crc kubenswrapper[4921]: I1210 13:16:43.390500 4921 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/43dbe2d1-afbb-424c-976b-ea22edc24254-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 13:16:43 crc kubenswrapper[4921]: I1210 13:16:43.390539 4921 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/43dbe2d1-afbb-424c-976b-ea22edc24254-config-data\") on node \"crc\" DevicePath \"\"" Dec 10 13:16:43 crc kubenswrapper[4921]: I1210 13:16:43.390552 4921 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/43dbe2d1-afbb-424c-976b-ea22edc24254-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 10 13:16:43 crc kubenswrapper[4921]: I1210 13:16:43.409644 4921 scope.go:117] "RemoveContainer" containerID="b694e83a473a6e12f58d383137f5d96e7ced0e2523c33637a10c6e3c47781c9d" Dec 10 13:16:43 crc kubenswrapper[4921]: E1210 13:16:43.412918 4921 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b694e83a473a6e12f58d383137f5d96e7ced0e2523c33637a10c6e3c47781c9d\": container with ID starting with b694e83a473a6e12f58d383137f5d96e7ced0e2523c33637a10c6e3c47781c9d not found: ID does not exist" containerID="b694e83a473a6e12f58d383137f5d96e7ced0e2523c33637a10c6e3c47781c9d" Dec 10 13:16:43 crc kubenswrapper[4921]: I1210 13:16:43.412952 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b694e83a473a6e12f58d383137f5d96e7ced0e2523c33637a10c6e3c47781c9d"} err="failed to get container status \"b694e83a473a6e12f58d383137f5d96e7ced0e2523c33637a10c6e3c47781c9d\": rpc error: code = NotFound desc = could not find container \"b694e83a473a6e12f58d383137f5d96e7ced0e2523c33637a10c6e3c47781c9d\": container with ID starting with b694e83a473a6e12f58d383137f5d96e7ced0e2523c33637a10c6e3c47781c9d not found: ID does not exist" Dec 10 13:16:43 crc kubenswrapper[4921]: I1210 13:16:43.412974 4921 scope.go:117] "RemoveContainer" containerID="9b562bc480a5c1d5a68dea6e162b817f75751b709f2508abe5732383497290bc" Dec 10 13:16:43 crc kubenswrapper[4921]: E1210 13:16:43.413345 4921 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9b562bc480a5c1d5a68dea6e162b817f75751b709f2508abe5732383497290bc\": container with ID starting with 9b562bc480a5c1d5a68dea6e162b817f75751b709f2508abe5732383497290bc not found: ID does not exist" containerID="9b562bc480a5c1d5a68dea6e162b817f75751b709f2508abe5732383497290bc" Dec 10 13:16:43 crc 
kubenswrapper[4921]: I1210 13:16:43.413401 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9b562bc480a5c1d5a68dea6e162b817f75751b709f2508abe5732383497290bc"} err="failed to get container status \"9b562bc480a5c1d5a68dea6e162b817f75751b709f2508abe5732383497290bc\": rpc error: code = NotFound desc = could not find container \"9b562bc480a5c1d5a68dea6e162b817f75751b709f2508abe5732383497290bc\": container with ID starting with 9b562bc480a5c1d5a68dea6e162b817f75751b709f2508abe5732383497290bc not found: ID does not exist" Dec 10 13:16:43 crc kubenswrapper[4921]: I1210 13:16:43.413434 4921 scope.go:117] "RemoveContainer" containerID="6ad84247bd240cc7f9326975eb2c4d88ca55f97d7849039215384fc529efe342" Dec 10 13:16:43 crc kubenswrapper[4921]: E1210 13:16:43.413725 4921 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6ad84247bd240cc7f9326975eb2c4d88ca55f97d7849039215384fc529efe342\": container with ID starting with 6ad84247bd240cc7f9326975eb2c4d88ca55f97d7849039215384fc529efe342 not found: ID does not exist" containerID="6ad84247bd240cc7f9326975eb2c4d88ca55f97d7849039215384fc529efe342" Dec 10 13:16:43 crc kubenswrapper[4921]: I1210 13:16:43.413750 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6ad84247bd240cc7f9326975eb2c4d88ca55f97d7849039215384fc529efe342"} err="failed to get container status \"6ad84247bd240cc7f9326975eb2c4d88ca55f97d7849039215384fc529efe342\": rpc error: code = NotFound desc = could not find container \"6ad84247bd240cc7f9326975eb2c4d88ca55f97d7849039215384fc529efe342\": container with ID starting with 6ad84247bd240cc7f9326975eb2c4d88ca55f97d7849039215384fc529efe342 not found: ID does not exist" Dec 10 13:16:43 crc kubenswrapper[4921]: I1210 13:16:43.413766 4921 scope.go:117] "RemoveContainer" containerID="cc6d84d3aca5786a4e97b847bb573cd6a403fc34a023488306367d1e6c979056" Dec 10 13:16:43 crc kubenswrapper[4921]: E1210 13:16:43.413988 4921 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cc6d84d3aca5786a4e97b847bb573cd6a403fc34a023488306367d1e6c979056\": container with ID starting with cc6d84d3aca5786a4e97b847bb573cd6a403fc34a023488306367d1e6c979056 not found: ID does not exist" containerID="cc6d84d3aca5786a4e97b847bb573cd6a403fc34a023488306367d1e6c979056" Dec 10 13:16:43 crc kubenswrapper[4921]: I1210 13:16:43.414009 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cc6d84d3aca5786a4e97b847bb573cd6a403fc34a023488306367d1e6c979056"} err="failed to get container status \"cc6d84d3aca5786a4e97b847bb573cd6a403fc34a023488306367d1e6c979056\": rpc error: code = NotFound desc = could not find container \"cc6d84d3aca5786a4e97b847bb573cd6a403fc34a023488306367d1e6c979056\": container with ID starting with cc6d84d3aca5786a4e97b847bb573cd6a403fc34a023488306367d1e6c979056 not found: ID does not exist" Dec 10 13:16:43 crc kubenswrapper[4921]: I1210 13:16:43.609117 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 10 13:16:43 crc kubenswrapper[4921]: I1210 13:16:43.620411 4921 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 10 13:16:43 crc kubenswrapper[4921]: I1210 13:16:43.642894 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 10 13:16:43 crc kubenswrapper[4921]: E1210 13:16:43.643640 
4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="43dbe2d1-afbb-424c-976b-ea22edc24254" containerName="sg-core" Dec 10 13:16:43 crc kubenswrapper[4921]: I1210 13:16:43.643810 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="43dbe2d1-afbb-424c-976b-ea22edc24254" containerName="sg-core" Dec 10 13:16:43 crc kubenswrapper[4921]: E1210 13:16:43.643933 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="43dbe2d1-afbb-424c-976b-ea22edc24254" containerName="ceilometer-central-agent" Dec 10 13:16:43 crc kubenswrapper[4921]: I1210 13:16:43.644030 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="43dbe2d1-afbb-424c-976b-ea22edc24254" containerName="ceilometer-central-agent" Dec 10 13:16:43 crc kubenswrapper[4921]: E1210 13:16:43.644274 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="43dbe2d1-afbb-424c-976b-ea22edc24254" containerName="proxy-httpd" Dec 10 13:16:43 crc kubenswrapper[4921]: I1210 13:16:43.644428 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="43dbe2d1-afbb-424c-976b-ea22edc24254" containerName="proxy-httpd" Dec 10 13:16:43 crc kubenswrapper[4921]: E1210 13:16:43.644541 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="43dbe2d1-afbb-424c-976b-ea22edc24254" containerName="ceilometer-notification-agent" Dec 10 13:16:43 crc kubenswrapper[4921]: I1210 13:16:43.644622 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="43dbe2d1-afbb-424c-976b-ea22edc24254" containerName="ceilometer-notification-agent" Dec 10 13:16:43 crc kubenswrapper[4921]: I1210 13:16:43.644940 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="43dbe2d1-afbb-424c-976b-ea22edc24254" containerName="ceilometer-central-agent" Dec 10 13:16:43 crc kubenswrapper[4921]: I1210 13:16:43.645044 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="43dbe2d1-afbb-424c-976b-ea22edc24254" containerName="sg-core" Dec 10 13:16:43 crc kubenswrapper[4921]: I1210 13:16:43.645125 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="43dbe2d1-afbb-424c-976b-ea22edc24254" containerName="proxy-httpd" Dec 10 13:16:43 crc kubenswrapper[4921]: I1210 13:16:43.645269 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="43dbe2d1-afbb-424c-976b-ea22edc24254" containerName="ceilometer-notification-agent" Dec 10 13:16:43 crc kubenswrapper[4921]: I1210 13:16:43.652791 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 10 13:16:43 crc kubenswrapper[4921]: I1210 13:16:43.665768 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 10 13:16:43 crc kubenswrapper[4921]: I1210 13:16:43.666131 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Dec 10 13:16:43 crc kubenswrapper[4921]: I1210 13:16:43.666297 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 10 13:16:43 crc kubenswrapper[4921]: I1210 13:16:43.674360 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 10 13:16:43 crc kubenswrapper[4921]: I1210 13:16:43.800813 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0659b979-f75b-4ea3-9231-78e3749d2393-scripts\") pod \"ceilometer-0\" (UID: \"0659b979-f75b-4ea3-9231-78e3749d2393\") " pod="openstack/ceilometer-0" Dec 10 13:16:43 crc kubenswrapper[4921]: I1210 13:16:43.800879 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0659b979-f75b-4ea3-9231-78e3749d2393-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"0659b979-f75b-4ea3-9231-78e3749d2393\") " pod="openstack/ceilometer-0" Dec 10 13:16:43 crc kubenswrapper[4921]: I1210 13:16:43.800956 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/0659b979-f75b-4ea3-9231-78e3749d2393-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"0659b979-f75b-4ea3-9231-78e3749d2393\") " pod="openstack/ceilometer-0" Dec 10 13:16:43 crc kubenswrapper[4921]: I1210 13:16:43.801033 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0659b979-f75b-4ea3-9231-78e3749d2393-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"0659b979-f75b-4ea3-9231-78e3749d2393\") " pod="openstack/ceilometer-0" Dec 10 13:16:43 crc kubenswrapper[4921]: I1210 13:16:43.801089 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0659b979-f75b-4ea3-9231-78e3749d2393-log-httpd\") pod \"ceilometer-0\" (UID: \"0659b979-f75b-4ea3-9231-78e3749d2393\") " pod="openstack/ceilometer-0" Dec 10 13:16:43 crc kubenswrapper[4921]: I1210 13:16:43.801112 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0659b979-f75b-4ea3-9231-78e3749d2393-config-data\") pod \"ceilometer-0\" (UID: \"0659b979-f75b-4ea3-9231-78e3749d2393\") " pod="openstack/ceilometer-0" Dec 10 13:16:43 crc kubenswrapper[4921]: I1210 13:16:43.801213 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lzbbx\" (UniqueName: \"kubernetes.io/projected/0659b979-f75b-4ea3-9231-78e3749d2393-kube-api-access-lzbbx\") pod \"ceilometer-0\" (UID: \"0659b979-f75b-4ea3-9231-78e3749d2393\") " pod="openstack/ceilometer-0" Dec 10 13:16:43 crc kubenswrapper[4921]: I1210 13:16:43.801279 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/0659b979-f75b-4ea3-9231-78e3749d2393-run-httpd\") pod \"ceilometer-0\" (UID: \"0659b979-f75b-4ea3-9231-78e3749d2393\") " pod="openstack/ceilometer-0" Dec 10 13:16:43 crc kubenswrapper[4921]: I1210 13:16:43.822140 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 10 13:16:43 crc kubenswrapper[4921]: E1210 13:16:43.822769 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[ceilometer-tls-certs combined-ca-bundle config-data kube-api-access-lzbbx log-httpd run-httpd scripts sg-core-conf-yaml], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/ceilometer-0" podUID="0659b979-f75b-4ea3-9231-78e3749d2393" Dec 10 13:16:43 crc kubenswrapper[4921]: I1210 13:16:43.902445 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0659b979-f75b-4ea3-9231-78e3749d2393-scripts\") pod \"ceilometer-0\" (UID: \"0659b979-f75b-4ea3-9231-78e3749d2393\") " pod="openstack/ceilometer-0" Dec 10 13:16:43 crc kubenswrapper[4921]: I1210 13:16:43.902508 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0659b979-f75b-4ea3-9231-78e3749d2393-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"0659b979-f75b-4ea3-9231-78e3749d2393\") " pod="openstack/ceilometer-0" Dec 10 13:16:43 crc kubenswrapper[4921]: I1210 13:16:43.902550 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/0659b979-f75b-4ea3-9231-78e3749d2393-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"0659b979-f75b-4ea3-9231-78e3749d2393\") " pod="openstack/ceilometer-0" Dec 10 13:16:43 crc kubenswrapper[4921]: I1210 13:16:43.902582 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0659b979-f75b-4ea3-9231-78e3749d2393-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"0659b979-f75b-4ea3-9231-78e3749d2393\") " pod="openstack/ceilometer-0" Dec 10 13:16:43 crc kubenswrapper[4921]: I1210 13:16:43.902620 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0659b979-f75b-4ea3-9231-78e3749d2393-log-httpd\") pod \"ceilometer-0\" (UID: \"0659b979-f75b-4ea3-9231-78e3749d2393\") " pod="openstack/ceilometer-0" Dec 10 13:16:43 crc kubenswrapper[4921]: I1210 13:16:43.902641 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0659b979-f75b-4ea3-9231-78e3749d2393-config-data\") pod \"ceilometer-0\" (UID: \"0659b979-f75b-4ea3-9231-78e3749d2393\") " pod="openstack/ceilometer-0" Dec 10 13:16:43 crc kubenswrapper[4921]: I1210 13:16:43.903374 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lzbbx\" (UniqueName: \"kubernetes.io/projected/0659b979-f75b-4ea3-9231-78e3749d2393-kube-api-access-lzbbx\") pod \"ceilometer-0\" (UID: \"0659b979-f75b-4ea3-9231-78e3749d2393\") " pod="openstack/ceilometer-0" Dec 10 13:16:43 crc kubenswrapper[4921]: I1210 13:16:43.903551 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0659b979-f75b-4ea3-9231-78e3749d2393-run-httpd\") pod \"ceilometer-0\" (UID: \"0659b979-f75b-4ea3-9231-78e3749d2393\") " 
pod="openstack/ceilometer-0" Dec 10 13:16:43 crc kubenswrapper[4921]: I1210 13:16:43.904035 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0659b979-f75b-4ea3-9231-78e3749d2393-run-httpd\") pod \"ceilometer-0\" (UID: \"0659b979-f75b-4ea3-9231-78e3749d2393\") " pod="openstack/ceilometer-0" Dec 10 13:16:43 crc kubenswrapper[4921]: I1210 13:16:43.904750 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0659b979-f75b-4ea3-9231-78e3749d2393-log-httpd\") pod \"ceilometer-0\" (UID: \"0659b979-f75b-4ea3-9231-78e3749d2393\") " pod="openstack/ceilometer-0" Dec 10 13:16:43 crc kubenswrapper[4921]: I1210 13:16:43.907951 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0659b979-f75b-4ea3-9231-78e3749d2393-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"0659b979-f75b-4ea3-9231-78e3749d2393\") " pod="openstack/ceilometer-0" Dec 10 13:16:43 crc kubenswrapper[4921]: I1210 13:16:43.909592 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0659b979-f75b-4ea3-9231-78e3749d2393-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"0659b979-f75b-4ea3-9231-78e3749d2393\") " pod="openstack/ceilometer-0" Dec 10 13:16:43 crc kubenswrapper[4921]: I1210 13:16:43.920015 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/0659b979-f75b-4ea3-9231-78e3749d2393-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"0659b979-f75b-4ea3-9231-78e3749d2393\") " pod="openstack/ceilometer-0" Dec 10 13:16:43 crc kubenswrapper[4921]: I1210 13:16:43.925939 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0659b979-f75b-4ea3-9231-78e3749d2393-config-data\") pod \"ceilometer-0\" (UID: \"0659b979-f75b-4ea3-9231-78e3749d2393\") " pod="openstack/ceilometer-0" Dec 10 13:16:43 crc kubenswrapper[4921]: I1210 13:16:43.948875 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0659b979-f75b-4ea3-9231-78e3749d2393-scripts\") pod \"ceilometer-0\" (UID: \"0659b979-f75b-4ea3-9231-78e3749d2393\") " pod="openstack/ceilometer-0" Dec 10 13:16:43 crc kubenswrapper[4921]: I1210 13:16:43.958304 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lzbbx\" (UniqueName: \"kubernetes.io/projected/0659b979-f75b-4ea3-9231-78e3749d2393-kube-api-access-lzbbx\") pod \"ceilometer-0\" (UID: \"0659b979-f75b-4ea3-9231-78e3749d2393\") " pod="openstack/ceilometer-0" Dec 10 13:16:44 crc kubenswrapper[4921]: I1210 13:16:44.280059 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 10 13:16:44 crc kubenswrapper[4921]: I1210 13:16:44.292883 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 10 13:16:44 crc kubenswrapper[4921]: I1210 13:16:44.414357 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/0659b979-f75b-4ea3-9231-78e3749d2393-ceilometer-tls-certs\") pod \"0659b979-f75b-4ea3-9231-78e3749d2393\" (UID: \"0659b979-f75b-4ea3-9231-78e3749d2393\") " Dec 10 13:16:44 crc kubenswrapper[4921]: I1210 13:16:44.414430 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0659b979-f75b-4ea3-9231-78e3749d2393-config-data\") pod \"0659b979-f75b-4ea3-9231-78e3749d2393\" (UID: \"0659b979-f75b-4ea3-9231-78e3749d2393\") " Dec 10 13:16:44 crc kubenswrapper[4921]: I1210 13:16:44.414467 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0659b979-f75b-4ea3-9231-78e3749d2393-log-httpd\") pod \"0659b979-f75b-4ea3-9231-78e3749d2393\" (UID: \"0659b979-f75b-4ea3-9231-78e3749d2393\") " Dec 10 13:16:44 crc kubenswrapper[4921]: I1210 13:16:44.414484 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0659b979-f75b-4ea3-9231-78e3749d2393-run-httpd\") pod \"0659b979-f75b-4ea3-9231-78e3749d2393\" (UID: \"0659b979-f75b-4ea3-9231-78e3749d2393\") " Dec 10 13:16:44 crc kubenswrapper[4921]: I1210 13:16:44.414522 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0659b979-f75b-4ea3-9231-78e3749d2393-combined-ca-bundle\") pod \"0659b979-f75b-4ea3-9231-78e3749d2393\" (UID: \"0659b979-f75b-4ea3-9231-78e3749d2393\") " Dec 10 13:16:44 crc kubenswrapper[4921]: I1210 13:16:44.414589 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0659b979-f75b-4ea3-9231-78e3749d2393-sg-core-conf-yaml\") pod \"0659b979-f75b-4ea3-9231-78e3749d2393\" (UID: \"0659b979-f75b-4ea3-9231-78e3749d2393\") " Dec 10 13:16:44 crc kubenswrapper[4921]: I1210 13:16:44.414615 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzbbx\" (UniqueName: \"kubernetes.io/projected/0659b979-f75b-4ea3-9231-78e3749d2393-kube-api-access-lzbbx\") pod \"0659b979-f75b-4ea3-9231-78e3749d2393\" (UID: \"0659b979-f75b-4ea3-9231-78e3749d2393\") " Dec 10 13:16:44 crc kubenswrapper[4921]: I1210 13:16:44.414635 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0659b979-f75b-4ea3-9231-78e3749d2393-scripts\") pod \"0659b979-f75b-4ea3-9231-78e3749d2393\" (UID: \"0659b979-f75b-4ea3-9231-78e3749d2393\") " Dec 10 13:16:44 crc kubenswrapper[4921]: I1210 13:16:44.414805 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0659b979-f75b-4ea3-9231-78e3749d2393-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "0659b979-f75b-4ea3-9231-78e3749d2393" (UID: "0659b979-f75b-4ea3-9231-78e3749d2393"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 13:16:44 crc kubenswrapper[4921]: I1210 13:16:44.414931 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0659b979-f75b-4ea3-9231-78e3749d2393-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "0659b979-f75b-4ea3-9231-78e3749d2393" (UID: "0659b979-f75b-4ea3-9231-78e3749d2393"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 13:16:44 crc kubenswrapper[4921]: I1210 13:16:44.415417 4921 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0659b979-f75b-4ea3-9231-78e3749d2393-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 10 13:16:44 crc kubenswrapper[4921]: I1210 13:16:44.415438 4921 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0659b979-f75b-4ea3-9231-78e3749d2393-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 10 13:16:44 crc kubenswrapper[4921]: I1210 13:16:44.419780 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0659b979-f75b-4ea3-9231-78e3749d2393-config-data" (OuterVolumeSpecName: "config-data") pod "0659b979-f75b-4ea3-9231-78e3749d2393" (UID: "0659b979-f75b-4ea3-9231-78e3749d2393"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:16:44 crc kubenswrapper[4921]: I1210 13:16:44.419797 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0659b979-f75b-4ea3-9231-78e3749d2393-kube-api-access-lzbbx" (OuterVolumeSpecName: "kube-api-access-lzbbx") pod "0659b979-f75b-4ea3-9231-78e3749d2393" (UID: "0659b979-f75b-4ea3-9231-78e3749d2393"). InnerVolumeSpecName "kube-api-access-lzbbx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 13:16:44 crc kubenswrapper[4921]: I1210 13:16:44.419844 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0659b979-f75b-4ea3-9231-78e3749d2393-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "0659b979-f75b-4ea3-9231-78e3749d2393" (UID: "0659b979-f75b-4ea3-9231-78e3749d2393"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:16:44 crc kubenswrapper[4921]: I1210 13:16:44.421584 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0659b979-f75b-4ea3-9231-78e3749d2393-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0659b979-f75b-4ea3-9231-78e3749d2393" (UID: "0659b979-f75b-4ea3-9231-78e3749d2393"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:16:44 crc kubenswrapper[4921]: I1210 13:16:44.434166 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0659b979-f75b-4ea3-9231-78e3749d2393-scripts" (OuterVolumeSpecName: "scripts") pod "0659b979-f75b-4ea3-9231-78e3749d2393" (UID: "0659b979-f75b-4ea3-9231-78e3749d2393"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:16:44 crc kubenswrapper[4921]: I1210 13:16:44.438984 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0659b979-f75b-4ea3-9231-78e3749d2393-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "0659b979-f75b-4ea3-9231-78e3749d2393" (UID: "0659b979-f75b-4ea3-9231-78e3749d2393"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:16:44 crc kubenswrapper[4921]: I1210 13:16:44.516040 4921 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0659b979-f75b-4ea3-9231-78e3749d2393-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 10 13:16:44 crc kubenswrapper[4921]: I1210 13:16:44.516080 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzbbx\" (UniqueName: \"kubernetes.io/projected/0659b979-f75b-4ea3-9231-78e3749d2393-kube-api-access-lzbbx\") on node \"crc\" DevicePath \"\"" Dec 10 13:16:44 crc kubenswrapper[4921]: I1210 13:16:44.516093 4921 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0659b979-f75b-4ea3-9231-78e3749d2393-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 13:16:44 crc kubenswrapper[4921]: I1210 13:16:44.516101 4921 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/0659b979-f75b-4ea3-9231-78e3749d2393-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 10 13:16:44 crc kubenswrapper[4921]: I1210 13:16:44.516110 4921 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0659b979-f75b-4ea3-9231-78e3749d2393-config-data\") on node \"crc\" DevicePath \"\"" Dec 10 13:16:44 crc kubenswrapper[4921]: I1210 13:16:44.516118 4921 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0659b979-f75b-4ea3-9231-78e3749d2393-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 13:16:45 crc kubenswrapper[4921]: I1210 13:16:45.188165 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 10 13:16:45 crc kubenswrapper[4921]: I1210 13:16:45.203186 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43dbe2d1-afbb-424c-976b-ea22edc24254" path="/var/lib/kubelet/pods/43dbe2d1-afbb-424c-976b-ea22edc24254/volumes" Dec 10 13:16:45 crc kubenswrapper[4921]: I1210 13:16:45.291287 4921 generic.go:334] "Generic (PLEG): container finished" podID="a1201fcf-b0b1-4dd4-80aa-d935a7f64fa3" containerID="6bb7262aeb0001a66dfe3ddc835ab13efabcdc6269da196ba86e27ee2f3a974d" exitCode=0 Dec 10 13:16:45 crc kubenswrapper[4921]: I1210 13:16:45.291426 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 10 13:16:45 crc kubenswrapper[4921]: I1210 13:16:45.291556 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"a1201fcf-b0b1-4dd4-80aa-d935a7f64fa3","Type":"ContainerDied","Data":"6bb7262aeb0001a66dfe3ddc835ab13efabcdc6269da196ba86e27ee2f3a974d"} Dec 10 13:16:45 crc kubenswrapper[4921]: I1210 13:16:45.291600 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"a1201fcf-b0b1-4dd4-80aa-d935a7f64fa3","Type":"ContainerDied","Data":"5b1e52702f3b3ecd1f06901cb5b131334dd7100f89c1e899508d8ee95b76c9f3"} Dec 10 13:16:45 crc kubenswrapper[4921]: I1210 13:16:45.291618 4921 scope.go:117] "RemoveContainer" containerID="6bb7262aeb0001a66dfe3ddc835ab13efabcdc6269da196ba86e27ee2f3a974d" Dec 10 13:16:45 crc kubenswrapper[4921]: I1210 13:16:45.292446 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 10 13:16:45 crc kubenswrapper[4921]: I1210 13:16:45.317605 4921 scope.go:117] "RemoveContainer" containerID="187447de8c9bf397ffbfb91caaf731f03af3b23218a8b28aa86987355f521c67" Dec 10 13:16:45 crc kubenswrapper[4921]: I1210 13:16:45.330190 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8phld\" (UniqueName: \"kubernetes.io/projected/a1201fcf-b0b1-4dd4-80aa-d935a7f64fa3-kube-api-access-8phld\") pod \"a1201fcf-b0b1-4dd4-80aa-d935a7f64fa3\" (UID: \"a1201fcf-b0b1-4dd4-80aa-d935a7f64fa3\") " Dec 10 13:16:45 crc kubenswrapper[4921]: I1210 13:16:45.330265 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1201fcf-b0b1-4dd4-80aa-d935a7f64fa3-combined-ca-bundle\") pod \"a1201fcf-b0b1-4dd4-80aa-d935a7f64fa3\" (UID: \"a1201fcf-b0b1-4dd4-80aa-d935a7f64fa3\") " Dec 10 13:16:45 crc kubenswrapper[4921]: I1210 13:16:45.330304 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a1201fcf-b0b1-4dd4-80aa-d935a7f64fa3-logs\") pod \"a1201fcf-b0b1-4dd4-80aa-d935a7f64fa3\" (UID: \"a1201fcf-b0b1-4dd4-80aa-d935a7f64fa3\") " Dec 10 13:16:45 crc kubenswrapper[4921]: I1210 13:16:45.330376 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a1201fcf-b0b1-4dd4-80aa-d935a7f64fa3-config-data\") pod \"a1201fcf-b0b1-4dd4-80aa-d935a7f64fa3\" (UID: \"a1201fcf-b0b1-4dd4-80aa-d935a7f64fa3\") " Dec 10 13:16:45 crc kubenswrapper[4921]: I1210 13:16:45.332778 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a1201fcf-b0b1-4dd4-80aa-d935a7f64fa3-logs" (OuterVolumeSpecName: "logs") pod "a1201fcf-b0b1-4dd4-80aa-d935a7f64fa3" (UID: "a1201fcf-b0b1-4dd4-80aa-d935a7f64fa3"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 13:16:45 crc kubenswrapper[4921]: I1210 13:16:45.380066 4921 scope.go:117] "RemoveContainer" containerID="6bb7262aeb0001a66dfe3ddc835ab13efabcdc6269da196ba86e27ee2f3a974d" Dec 10 13:16:45 crc kubenswrapper[4921]: I1210 13:16:45.380170 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a1201fcf-b0b1-4dd4-80aa-d935a7f64fa3-kube-api-access-8phld" (OuterVolumeSpecName: "kube-api-access-8phld") pod "a1201fcf-b0b1-4dd4-80aa-d935a7f64fa3" (UID: "a1201fcf-b0b1-4dd4-80aa-d935a7f64fa3"). 
InnerVolumeSpecName "kube-api-access-8phld". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 13:16:45 crc kubenswrapper[4921]: E1210 13:16:45.380511 4921 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6bb7262aeb0001a66dfe3ddc835ab13efabcdc6269da196ba86e27ee2f3a974d\": container with ID starting with 6bb7262aeb0001a66dfe3ddc835ab13efabcdc6269da196ba86e27ee2f3a974d not found: ID does not exist" containerID="6bb7262aeb0001a66dfe3ddc835ab13efabcdc6269da196ba86e27ee2f3a974d" Dec 10 13:16:45 crc kubenswrapper[4921]: I1210 13:16:45.380554 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6bb7262aeb0001a66dfe3ddc835ab13efabcdc6269da196ba86e27ee2f3a974d"} err="failed to get container status \"6bb7262aeb0001a66dfe3ddc835ab13efabcdc6269da196ba86e27ee2f3a974d\": rpc error: code = NotFound desc = could not find container \"6bb7262aeb0001a66dfe3ddc835ab13efabcdc6269da196ba86e27ee2f3a974d\": container with ID starting with 6bb7262aeb0001a66dfe3ddc835ab13efabcdc6269da196ba86e27ee2f3a974d not found: ID does not exist" Dec 10 13:16:45 crc kubenswrapper[4921]: I1210 13:16:45.380577 4921 scope.go:117] "RemoveContainer" containerID="187447de8c9bf397ffbfb91caaf731f03af3b23218a8b28aa86987355f521c67" Dec 10 13:16:45 crc kubenswrapper[4921]: E1210 13:16:45.380781 4921 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"187447de8c9bf397ffbfb91caaf731f03af3b23218a8b28aa86987355f521c67\": container with ID starting with 187447de8c9bf397ffbfb91caaf731f03af3b23218a8b28aa86987355f521c67 not found: ID does not exist" containerID="187447de8c9bf397ffbfb91caaf731f03af3b23218a8b28aa86987355f521c67" Dec 10 13:16:45 crc kubenswrapper[4921]: I1210 13:16:45.380804 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"187447de8c9bf397ffbfb91caaf731f03af3b23218a8b28aa86987355f521c67"} err="failed to get container status \"187447de8c9bf397ffbfb91caaf731f03af3b23218a8b28aa86987355f521c67\": rpc error: code = NotFound desc = could not find container \"187447de8c9bf397ffbfb91caaf731f03af3b23218a8b28aa86987355f521c67\": container with ID starting with 187447de8c9bf397ffbfb91caaf731f03af3b23218a8b28aa86987355f521c67 not found: ID does not exist" Dec 10 13:16:45 crc kubenswrapper[4921]: I1210 13:16:45.396770 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 10 13:16:45 crc kubenswrapper[4921]: I1210 13:16:45.431464 4921 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 10 13:16:45 crc kubenswrapper[4921]: I1210 13:16:45.433089 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a1201fcf-b0b1-4dd4-80aa-d935a7f64fa3-config-data" (OuterVolumeSpecName: "config-data") pod "a1201fcf-b0b1-4dd4-80aa-d935a7f64fa3" (UID: "a1201fcf-b0b1-4dd4-80aa-d935a7f64fa3"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:16:45 crc kubenswrapper[4921]: I1210 13:16:45.441941 4921 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a1201fcf-b0b1-4dd4-80aa-d935a7f64fa3-config-data\") on node \"crc\" DevicePath \"\"" Dec 10 13:16:45 crc kubenswrapper[4921]: I1210 13:16:45.441975 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8phld\" (UniqueName: \"kubernetes.io/projected/a1201fcf-b0b1-4dd4-80aa-d935a7f64fa3-kube-api-access-8phld\") on node \"crc\" DevicePath \"\"" Dec 10 13:16:45 crc kubenswrapper[4921]: I1210 13:16:45.441987 4921 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a1201fcf-b0b1-4dd4-80aa-d935a7f64fa3-logs\") on node \"crc\" DevicePath \"\"" Dec 10 13:16:45 crc kubenswrapper[4921]: I1210 13:16:45.442019 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 10 13:16:45 crc kubenswrapper[4921]: E1210 13:16:45.443743 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a1201fcf-b0b1-4dd4-80aa-d935a7f64fa3" containerName="nova-api-api" Dec 10 13:16:45 crc kubenswrapper[4921]: I1210 13:16:45.443765 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="a1201fcf-b0b1-4dd4-80aa-d935a7f64fa3" containerName="nova-api-api" Dec 10 13:16:45 crc kubenswrapper[4921]: E1210 13:16:45.443790 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a1201fcf-b0b1-4dd4-80aa-d935a7f64fa3" containerName="nova-api-log" Dec 10 13:16:45 crc kubenswrapper[4921]: I1210 13:16:45.443796 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="a1201fcf-b0b1-4dd4-80aa-d935a7f64fa3" containerName="nova-api-log" Dec 10 13:16:45 crc kubenswrapper[4921]: I1210 13:16:45.444970 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="a1201fcf-b0b1-4dd4-80aa-d935a7f64fa3" containerName="nova-api-log" Dec 10 13:16:45 crc kubenswrapper[4921]: I1210 13:16:45.444996 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="a1201fcf-b0b1-4dd4-80aa-d935a7f64fa3" containerName="nova-api-api" Dec 10 13:16:45 crc kubenswrapper[4921]: I1210 13:16:45.466085 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 10 13:16:45 crc kubenswrapper[4921]: I1210 13:16:45.471803 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 10 13:16:45 crc kubenswrapper[4921]: I1210 13:16:45.473019 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 10 13:16:45 crc kubenswrapper[4921]: I1210 13:16:45.473136 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 10 13:16:45 crc kubenswrapper[4921]: I1210 13:16:45.473013 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Dec 10 13:16:45 crc kubenswrapper[4921]: I1210 13:16:45.476951 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a1201fcf-b0b1-4dd4-80aa-d935a7f64fa3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a1201fcf-b0b1-4dd4-80aa-d935a7f64fa3" (UID: "a1201fcf-b0b1-4dd4-80aa-d935a7f64fa3"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:16:45 crc kubenswrapper[4921]: I1210 13:16:45.543636 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ded7f5ee-1c28-48c4-bd62-5fa8f40608ef-scripts\") pod \"ceilometer-0\" (UID: \"ded7f5ee-1c28-48c4-bd62-5fa8f40608ef\") " pod="openstack/ceilometer-0" Dec 10 13:16:45 crc kubenswrapper[4921]: I1210 13:16:45.544020 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ded7f5ee-1c28-48c4-bd62-5fa8f40608ef-log-httpd\") pod \"ceilometer-0\" (UID: \"ded7f5ee-1c28-48c4-bd62-5fa8f40608ef\") " pod="openstack/ceilometer-0" Dec 10 13:16:45 crc kubenswrapper[4921]: I1210 13:16:45.544049 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/ded7f5ee-1c28-48c4-bd62-5fa8f40608ef-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"ded7f5ee-1c28-48c4-bd62-5fa8f40608ef\") " pod="openstack/ceilometer-0" Dec 10 13:16:45 crc kubenswrapper[4921]: I1210 13:16:45.544067 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b9dgs\" (UniqueName: \"kubernetes.io/projected/ded7f5ee-1c28-48c4-bd62-5fa8f40608ef-kube-api-access-b9dgs\") pod \"ceilometer-0\" (UID: \"ded7f5ee-1c28-48c4-bd62-5fa8f40608ef\") " pod="openstack/ceilometer-0" Dec 10 13:16:45 crc kubenswrapper[4921]: I1210 13:16:45.544116 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ded7f5ee-1c28-48c4-bd62-5fa8f40608ef-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ded7f5ee-1c28-48c4-bd62-5fa8f40608ef\") " pod="openstack/ceilometer-0" Dec 10 13:16:45 crc kubenswrapper[4921]: I1210 13:16:45.544135 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ded7f5ee-1c28-48c4-bd62-5fa8f40608ef-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ded7f5ee-1c28-48c4-bd62-5fa8f40608ef\") " pod="openstack/ceilometer-0" Dec 10 13:16:45 crc kubenswrapper[4921]: I1210 13:16:45.544165 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ded7f5ee-1c28-48c4-bd62-5fa8f40608ef-run-httpd\") pod \"ceilometer-0\" (UID: \"ded7f5ee-1c28-48c4-bd62-5fa8f40608ef\") " pod="openstack/ceilometer-0" Dec 10 13:16:45 crc kubenswrapper[4921]: I1210 13:16:45.544205 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ded7f5ee-1c28-48c4-bd62-5fa8f40608ef-config-data\") pod \"ceilometer-0\" (UID: \"ded7f5ee-1c28-48c4-bd62-5fa8f40608ef\") " pod="openstack/ceilometer-0" Dec 10 13:16:45 crc kubenswrapper[4921]: I1210 13:16:45.544271 4921 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1201fcf-b0b1-4dd4-80aa-d935a7f64fa3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 13:16:45 crc kubenswrapper[4921]: I1210 13:16:45.601420 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Dec 10 13:16:45 crc kubenswrapper[4921]: I1210 
13:16:45.622661 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Dec 10 13:16:45 crc kubenswrapper[4921]: I1210 13:16:45.629567 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 10 13:16:45 crc kubenswrapper[4921]: I1210 13:16:45.638543 4921 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 10 13:16:45 crc kubenswrapper[4921]: I1210 13:16:45.647400 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ded7f5ee-1c28-48c4-bd62-5fa8f40608ef-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ded7f5ee-1c28-48c4-bd62-5fa8f40608ef\") " pod="openstack/ceilometer-0" Dec 10 13:16:45 crc kubenswrapper[4921]: I1210 13:16:45.647462 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ded7f5ee-1c28-48c4-bd62-5fa8f40608ef-run-httpd\") pod \"ceilometer-0\" (UID: \"ded7f5ee-1c28-48c4-bd62-5fa8f40608ef\") " pod="openstack/ceilometer-0" Dec 10 13:16:45 crc kubenswrapper[4921]: I1210 13:16:45.647537 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ded7f5ee-1c28-48c4-bd62-5fa8f40608ef-config-data\") pod \"ceilometer-0\" (UID: \"ded7f5ee-1c28-48c4-bd62-5fa8f40608ef\") " pod="openstack/ceilometer-0" Dec 10 13:16:45 crc kubenswrapper[4921]: I1210 13:16:45.647587 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ded7f5ee-1c28-48c4-bd62-5fa8f40608ef-scripts\") pod \"ceilometer-0\" (UID: \"ded7f5ee-1c28-48c4-bd62-5fa8f40608ef\") " pod="openstack/ceilometer-0" Dec 10 13:16:45 crc kubenswrapper[4921]: I1210 13:16:45.647670 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ded7f5ee-1c28-48c4-bd62-5fa8f40608ef-log-httpd\") pod \"ceilometer-0\" (UID: \"ded7f5ee-1c28-48c4-bd62-5fa8f40608ef\") " pod="openstack/ceilometer-0" Dec 10 13:16:45 crc kubenswrapper[4921]: I1210 13:16:45.647697 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/ded7f5ee-1c28-48c4-bd62-5fa8f40608ef-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"ded7f5ee-1c28-48c4-bd62-5fa8f40608ef\") " pod="openstack/ceilometer-0" Dec 10 13:16:45 crc kubenswrapper[4921]: I1210 13:16:45.647715 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b9dgs\" (UniqueName: \"kubernetes.io/projected/ded7f5ee-1c28-48c4-bd62-5fa8f40608ef-kube-api-access-b9dgs\") pod \"ceilometer-0\" (UID: \"ded7f5ee-1c28-48c4-bd62-5fa8f40608ef\") " pod="openstack/ceilometer-0" Dec 10 13:16:45 crc kubenswrapper[4921]: I1210 13:16:45.647763 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ded7f5ee-1c28-48c4-bd62-5fa8f40608ef-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ded7f5ee-1c28-48c4-bd62-5fa8f40608ef\") " pod="openstack/ceilometer-0" Dec 10 13:16:45 crc kubenswrapper[4921]: I1210 13:16:45.650848 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ded7f5ee-1c28-48c4-bd62-5fa8f40608ef-log-httpd\") pod \"ceilometer-0\" (UID: 
\"ded7f5ee-1c28-48c4-bd62-5fa8f40608ef\") " pod="openstack/ceilometer-0" Dec 10 13:16:45 crc kubenswrapper[4921]: I1210 13:16:45.650994 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ded7f5ee-1c28-48c4-bd62-5fa8f40608ef-run-httpd\") pod \"ceilometer-0\" (UID: \"ded7f5ee-1c28-48c4-bd62-5fa8f40608ef\") " pod="openstack/ceilometer-0" Dec 10 13:16:45 crc kubenswrapper[4921]: I1210 13:16:45.652222 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ded7f5ee-1c28-48c4-bd62-5fa8f40608ef-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ded7f5ee-1c28-48c4-bd62-5fa8f40608ef\") " pod="openstack/ceilometer-0" Dec 10 13:16:45 crc kubenswrapper[4921]: I1210 13:16:45.660684 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ded7f5ee-1c28-48c4-bd62-5fa8f40608ef-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ded7f5ee-1c28-48c4-bd62-5fa8f40608ef\") " pod="openstack/ceilometer-0" Dec 10 13:16:45 crc kubenswrapper[4921]: I1210 13:16:45.661291 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ded7f5ee-1c28-48c4-bd62-5fa8f40608ef-scripts\") pod \"ceilometer-0\" (UID: \"ded7f5ee-1c28-48c4-bd62-5fa8f40608ef\") " pod="openstack/ceilometer-0" Dec 10 13:16:45 crc kubenswrapper[4921]: I1210 13:16:45.661896 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ded7f5ee-1c28-48c4-bd62-5fa8f40608ef-config-data\") pod \"ceilometer-0\" (UID: \"ded7f5ee-1c28-48c4-bd62-5fa8f40608ef\") " pod="openstack/ceilometer-0" Dec 10 13:16:45 crc kubenswrapper[4921]: I1210 13:16:45.664676 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 10 13:16:45 crc kubenswrapper[4921]: I1210 13:16:45.665249 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/ded7f5ee-1c28-48c4-bd62-5fa8f40608ef-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"ded7f5ee-1c28-48c4-bd62-5fa8f40608ef\") " pod="openstack/ceilometer-0" Dec 10 13:16:45 crc kubenswrapper[4921]: I1210 13:16:45.666138 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 10 13:16:45 crc kubenswrapper[4921]: I1210 13:16:45.670143 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Dec 10 13:16:45 crc kubenswrapper[4921]: I1210 13:16:45.670360 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 10 13:16:45 crc kubenswrapper[4921]: I1210 13:16:45.670549 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Dec 10 13:16:45 crc kubenswrapper[4921]: I1210 13:16:45.680491 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b9dgs\" (UniqueName: \"kubernetes.io/projected/ded7f5ee-1c28-48c4-bd62-5fa8f40608ef-kube-api-access-b9dgs\") pod \"ceilometer-0\" (UID: \"ded7f5ee-1c28-48c4-bd62-5fa8f40608ef\") " pod="openstack/ceilometer-0" Dec 10 13:16:45 crc kubenswrapper[4921]: I1210 13:16:45.686504 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 10 13:16:45 crc kubenswrapper[4921]: I1210 13:16:45.749184 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/abcdde0d-505c-4cc7-9805-09b2fb3e562c-config-data\") pod \"nova-api-0\" (UID: \"abcdde0d-505c-4cc7-9805-09b2fb3e562c\") " pod="openstack/nova-api-0" Dec 10 13:16:45 crc kubenswrapper[4921]: I1210 13:16:45.749239 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/abcdde0d-505c-4cc7-9805-09b2fb3e562c-public-tls-certs\") pod \"nova-api-0\" (UID: \"abcdde0d-505c-4cc7-9805-09b2fb3e562c\") " pod="openstack/nova-api-0" Dec 10 13:16:45 crc kubenswrapper[4921]: I1210 13:16:45.749266 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/abcdde0d-505c-4cc7-9805-09b2fb3e562c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"abcdde0d-505c-4cc7-9805-09b2fb3e562c\") " pod="openstack/nova-api-0" Dec 10 13:16:45 crc kubenswrapper[4921]: I1210 13:16:45.749594 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/abcdde0d-505c-4cc7-9805-09b2fb3e562c-internal-tls-certs\") pod \"nova-api-0\" (UID: \"abcdde0d-505c-4cc7-9805-09b2fb3e562c\") " pod="openstack/nova-api-0" Dec 10 13:16:45 crc kubenswrapper[4921]: I1210 13:16:45.749669 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/abcdde0d-505c-4cc7-9805-09b2fb3e562c-logs\") pod \"nova-api-0\" (UID: \"abcdde0d-505c-4cc7-9805-09b2fb3e562c\") " pod="openstack/nova-api-0" Dec 10 13:16:45 crc kubenswrapper[4921]: I1210 13:16:45.749706 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d6dcm\" (UniqueName: \"kubernetes.io/projected/abcdde0d-505c-4cc7-9805-09b2fb3e562c-kube-api-access-d6dcm\") pod \"nova-api-0\" (UID: \"abcdde0d-505c-4cc7-9805-09b2fb3e562c\") " pod="openstack/nova-api-0" Dec 10 13:16:45 crc kubenswrapper[4921]: I1210 13:16:45.810303 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 10 13:16:45 crc kubenswrapper[4921]: I1210 13:16:45.851099 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/abcdde0d-505c-4cc7-9805-09b2fb3e562c-config-data\") pod \"nova-api-0\" (UID: \"abcdde0d-505c-4cc7-9805-09b2fb3e562c\") " pod="openstack/nova-api-0" Dec 10 13:16:45 crc kubenswrapper[4921]: I1210 13:16:45.851157 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/abcdde0d-505c-4cc7-9805-09b2fb3e562c-public-tls-certs\") pod \"nova-api-0\" (UID: \"abcdde0d-505c-4cc7-9805-09b2fb3e562c\") " pod="openstack/nova-api-0" Dec 10 13:16:45 crc kubenswrapper[4921]: I1210 13:16:45.851186 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/abcdde0d-505c-4cc7-9805-09b2fb3e562c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"abcdde0d-505c-4cc7-9805-09b2fb3e562c\") " pod="openstack/nova-api-0" Dec 10 13:16:45 crc kubenswrapper[4921]: I1210 13:16:45.851253 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/abcdde0d-505c-4cc7-9805-09b2fb3e562c-internal-tls-certs\") pod \"nova-api-0\" (UID: \"abcdde0d-505c-4cc7-9805-09b2fb3e562c\") " pod="openstack/nova-api-0" Dec 10 13:16:45 crc kubenswrapper[4921]: I1210 13:16:45.851280 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/abcdde0d-505c-4cc7-9805-09b2fb3e562c-logs\") pod \"nova-api-0\" (UID: \"abcdde0d-505c-4cc7-9805-09b2fb3e562c\") " pod="openstack/nova-api-0" Dec 10 13:16:45 crc kubenswrapper[4921]: I1210 13:16:45.851299 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d6dcm\" (UniqueName: \"kubernetes.io/projected/abcdde0d-505c-4cc7-9805-09b2fb3e562c-kube-api-access-d6dcm\") pod \"nova-api-0\" (UID: \"abcdde0d-505c-4cc7-9805-09b2fb3e562c\") " pod="openstack/nova-api-0" Dec 10 13:16:45 crc kubenswrapper[4921]: I1210 13:16:45.858354 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/abcdde0d-505c-4cc7-9805-09b2fb3e562c-logs\") pod \"nova-api-0\" (UID: \"abcdde0d-505c-4cc7-9805-09b2fb3e562c\") " pod="openstack/nova-api-0" Dec 10 13:16:45 crc kubenswrapper[4921]: I1210 13:16:45.863967 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/abcdde0d-505c-4cc7-9805-09b2fb3e562c-internal-tls-certs\") pod \"nova-api-0\" (UID: \"abcdde0d-505c-4cc7-9805-09b2fb3e562c\") " pod="openstack/nova-api-0" Dec 10 13:16:45 crc kubenswrapper[4921]: I1210 13:16:45.866793 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/abcdde0d-505c-4cc7-9805-09b2fb3e562c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"abcdde0d-505c-4cc7-9805-09b2fb3e562c\") " pod="openstack/nova-api-0" Dec 10 13:16:45 crc kubenswrapper[4921]: I1210 13:16:45.868870 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/abcdde0d-505c-4cc7-9805-09b2fb3e562c-public-tls-certs\") pod \"nova-api-0\" (UID: \"abcdde0d-505c-4cc7-9805-09b2fb3e562c\") " pod="openstack/nova-api-0" Dec 10 13:16:45 
crc kubenswrapper[4921]: I1210 13:16:45.873074 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/abcdde0d-505c-4cc7-9805-09b2fb3e562c-config-data\") pod \"nova-api-0\" (UID: \"abcdde0d-505c-4cc7-9805-09b2fb3e562c\") " pod="openstack/nova-api-0" Dec 10 13:16:45 crc kubenswrapper[4921]: I1210 13:16:45.875836 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d6dcm\" (UniqueName: \"kubernetes.io/projected/abcdde0d-505c-4cc7-9805-09b2fb3e562c-kube-api-access-d6dcm\") pod \"nova-api-0\" (UID: \"abcdde0d-505c-4cc7-9805-09b2fb3e562c\") " pod="openstack/nova-api-0" Dec 10 13:16:45 crc kubenswrapper[4921]: I1210 13:16:45.998164 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 10 13:16:46 crc kubenswrapper[4921]: I1210 13:16:46.166204 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 10 13:16:46 crc kubenswrapper[4921]: I1210 13:16:46.306167 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ded7f5ee-1c28-48c4-bd62-5fa8f40608ef","Type":"ContainerStarted","Data":"12c703493deb1d8e02fef87f61ff5b1aa0ffae83ba69dd863b3b1e3dcde7d497"} Dec 10 13:16:46 crc kubenswrapper[4921]: I1210 13:16:46.336697 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Dec 10 13:16:46 crc kubenswrapper[4921]: W1210 13:16:46.500647 4921 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podabcdde0d_505c_4cc7_9805_09b2fb3e562c.slice/crio-e4de30c86852f3031579aa22a112d59cff2036842712ef62206ed9c611cf87f8 WatchSource:0}: Error finding container e4de30c86852f3031579aa22a112d59cff2036842712ef62206ed9c611cf87f8: Status 404 returned error can't find the container with id e4de30c86852f3031579aa22a112d59cff2036842712ef62206ed9c611cf87f8 Dec 10 13:16:46 crc kubenswrapper[4921]: I1210 13:16:46.502248 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 10 13:16:46 crc kubenswrapper[4921]: I1210 13:16:46.510536 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-dx9x6"] Dec 10 13:16:46 crc kubenswrapper[4921]: I1210 13:16:46.512304 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-dx9x6" Dec 10 13:16:46 crc kubenswrapper[4921]: I1210 13:16:46.517369 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts" Dec 10 13:16:46 crc kubenswrapper[4921]: I1210 13:16:46.518097 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data" Dec 10 13:16:46 crc kubenswrapper[4921]: I1210 13:16:46.523442 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-dx9x6"] Dec 10 13:16:46 crc kubenswrapper[4921]: I1210 13:16:46.668772 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b4fa87ce-fd97-46de-b9d2-3f1f544e6ebd-scripts\") pod \"nova-cell1-cell-mapping-dx9x6\" (UID: \"b4fa87ce-fd97-46de-b9d2-3f1f544e6ebd\") " pod="openstack/nova-cell1-cell-mapping-dx9x6" Dec 10 13:16:46 crc kubenswrapper[4921]: I1210 13:16:46.668917 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b4fa87ce-fd97-46de-b9d2-3f1f544e6ebd-config-data\") pod \"nova-cell1-cell-mapping-dx9x6\" (UID: \"b4fa87ce-fd97-46de-b9d2-3f1f544e6ebd\") " pod="openstack/nova-cell1-cell-mapping-dx9x6" Dec 10 13:16:46 crc kubenswrapper[4921]: I1210 13:16:46.668997 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4fa87ce-fd97-46de-b9d2-3f1f544e6ebd-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-dx9x6\" (UID: \"b4fa87ce-fd97-46de-b9d2-3f1f544e6ebd\") " pod="openstack/nova-cell1-cell-mapping-dx9x6" Dec 10 13:16:46 crc kubenswrapper[4921]: I1210 13:16:46.669034 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mplkc\" (UniqueName: \"kubernetes.io/projected/b4fa87ce-fd97-46de-b9d2-3f1f544e6ebd-kube-api-access-mplkc\") pod \"nova-cell1-cell-mapping-dx9x6\" (UID: \"b4fa87ce-fd97-46de-b9d2-3f1f544e6ebd\") " pod="openstack/nova-cell1-cell-mapping-dx9x6" Dec 10 13:16:46 crc kubenswrapper[4921]: I1210 13:16:46.770421 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b4fa87ce-fd97-46de-b9d2-3f1f544e6ebd-config-data\") pod \"nova-cell1-cell-mapping-dx9x6\" (UID: \"b4fa87ce-fd97-46de-b9d2-3f1f544e6ebd\") " pod="openstack/nova-cell1-cell-mapping-dx9x6" Dec 10 13:16:46 crc kubenswrapper[4921]: I1210 13:16:46.770496 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4fa87ce-fd97-46de-b9d2-3f1f544e6ebd-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-dx9x6\" (UID: \"b4fa87ce-fd97-46de-b9d2-3f1f544e6ebd\") " pod="openstack/nova-cell1-cell-mapping-dx9x6" Dec 10 13:16:46 crc kubenswrapper[4921]: I1210 13:16:46.770522 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mplkc\" (UniqueName: \"kubernetes.io/projected/b4fa87ce-fd97-46de-b9d2-3f1f544e6ebd-kube-api-access-mplkc\") pod \"nova-cell1-cell-mapping-dx9x6\" (UID: \"b4fa87ce-fd97-46de-b9d2-3f1f544e6ebd\") " pod="openstack/nova-cell1-cell-mapping-dx9x6" Dec 10 13:16:46 crc kubenswrapper[4921]: I1210 13:16:46.770566 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" 
(UniqueName: \"kubernetes.io/secret/b4fa87ce-fd97-46de-b9d2-3f1f544e6ebd-scripts\") pod \"nova-cell1-cell-mapping-dx9x6\" (UID: \"b4fa87ce-fd97-46de-b9d2-3f1f544e6ebd\") " pod="openstack/nova-cell1-cell-mapping-dx9x6" Dec 10 13:16:46 crc kubenswrapper[4921]: I1210 13:16:46.774056 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b4fa87ce-fd97-46de-b9d2-3f1f544e6ebd-scripts\") pod \"nova-cell1-cell-mapping-dx9x6\" (UID: \"b4fa87ce-fd97-46de-b9d2-3f1f544e6ebd\") " pod="openstack/nova-cell1-cell-mapping-dx9x6" Dec 10 13:16:46 crc kubenswrapper[4921]: I1210 13:16:46.775046 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b4fa87ce-fd97-46de-b9d2-3f1f544e6ebd-config-data\") pod \"nova-cell1-cell-mapping-dx9x6\" (UID: \"b4fa87ce-fd97-46de-b9d2-3f1f544e6ebd\") " pod="openstack/nova-cell1-cell-mapping-dx9x6" Dec 10 13:16:46 crc kubenswrapper[4921]: I1210 13:16:46.775220 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4fa87ce-fd97-46de-b9d2-3f1f544e6ebd-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-dx9x6\" (UID: \"b4fa87ce-fd97-46de-b9d2-3f1f544e6ebd\") " pod="openstack/nova-cell1-cell-mapping-dx9x6" Dec 10 13:16:46 crc kubenswrapper[4921]: I1210 13:16:46.793128 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mplkc\" (UniqueName: \"kubernetes.io/projected/b4fa87ce-fd97-46de-b9d2-3f1f544e6ebd-kube-api-access-mplkc\") pod \"nova-cell1-cell-mapping-dx9x6\" (UID: \"b4fa87ce-fd97-46de-b9d2-3f1f544e6ebd\") " pod="openstack/nova-cell1-cell-mapping-dx9x6" Dec 10 13:16:46 crc kubenswrapper[4921]: I1210 13:16:46.834327 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-dx9x6" Dec 10 13:16:47 crc kubenswrapper[4921]: I1210 13:16:47.203268 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0659b979-f75b-4ea3-9231-78e3749d2393" path="/var/lib/kubelet/pods/0659b979-f75b-4ea3-9231-78e3749d2393/volumes" Dec 10 13:16:47 crc kubenswrapper[4921]: I1210 13:16:47.204492 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a1201fcf-b0b1-4dd4-80aa-d935a7f64fa3" path="/var/lib/kubelet/pods/a1201fcf-b0b1-4dd4-80aa-d935a7f64fa3/volumes" Dec 10 13:16:47 crc kubenswrapper[4921]: I1210 13:16:47.321609 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"abcdde0d-505c-4cc7-9805-09b2fb3e562c","Type":"ContainerStarted","Data":"09f3e93d6f5f4c2cd2b9d1dbcad906f238b66627431a76f79cc46d8967d0b090"} Dec 10 13:16:47 crc kubenswrapper[4921]: I1210 13:16:47.321662 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"abcdde0d-505c-4cc7-9805-09b2fb3e562c","Type":"ContainerStarted","Data":"712ab96d0e5a8a7b62fe495759c83a77161905eb3dd495f54a2326e27c7a7b37"} Dec 10 13:16:47 crc kubenswrapper[4921]: I1210 13:16:47.321678 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"abcdde0d-505c-4cc7-9805-09b2fb3e562c","Type":"ContainerStarted","Data":"e4de30c86852f3031579aa22a112d59cff2036842712ef62206ed9c611cf87f8"} Dec 10 13:16:47 crc kubenswrapper[4921]: I1210 13:16:47.335997 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-dx9x6"] Dec 10 13:16:47 crc kubenswrapper[4921]: I1210 13:16:47.341180 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ded7f5ee-1c28-48c4-bd62-5fa8f40608ef","Type":"ContainerStarted","Data":"b6f5ff1ff3f6a0c8a98b4b7ea9d941611ceb938fbe086f44460b6ac6d05deb72"} Dec 10 13:16:47 crc kubenswrapper[4921]: I1210 13:16:47.364374 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.364356022 podStartE2EDuration="2.364356022s" podCreationTimestamp="2025-12-10 13:16:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 13:16:47.352531324 +0000 UTC m=+1204.568753258" watchObservedRunningTime="2025-12-10 13:16:47.364356022 +0000 UTC m=+1204.580577946" Dec 10 13:16:48 crc kubenswrapper[4921]: I1210 13:16:48.347418 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-dx9x6" event={"ID":"b4fa87ce-fd97-46de-b9d2-3f1f544e6ebd","Type":"ContainerStarted","Data":"eb90798c4a10f8296339c8c963a4a07c8ea2bd08b1a4a6006dd9bdd38041087f"} Dec 10 13:16:48 crc kubenswrapper[4921]: I1210 13:16:48.348517 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-dx9x6" event={"ID":"b4fa87ce-fd97-46de-b9d2-3f1f544e6ebd","Type":"ContainerStarted","Data":"facbe65a17b657a3be8c2945efd4aa7fdf2e0f60bb72bf51a75b96a77ff02b31"} Dec 10 13:16:48 crc kubenswrapper[4921]: I1210 13:16:48.349251 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ded7f5ee-1c28-48c4-bd62-5fa8f40608ef","Type":"ContainerStarted","Data":"7269514e86b37a58f62e39064ac5c659513e3cb90db93e4a060aa09b4f71118d"} Dec 10 13:16:48 crc kubenswrapper[4921]: I1210 13:16:48.368771 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack/nova-cell1-cell-mapping-dx9x6" podStartSLOduration=2.368754957 podStartE2EDuration="2.368754957s" podCreationTimestamp="2025-12-10 13:16:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 13:16:48.361151253 +0000 UTC m=+1205.577373177" watchObservedRunningTime="2025-12-10 13:16:48.368754957 +0000 UTC m=+1205.584976881" Dec 10 13:16:48 crc kubenswrapper[4921]: I1210 13:16:48.770362 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-68d4b6d797-5tzmd" Dec 10 13:16:48 crc kubenswrapper[4921]: I1210 13:16:48.838560 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8b8cf6657-c6mhs"] Dec 10 13:16:48 crc kubenswrapper[4921]: I1210 13:16:48.838853 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-8b8cf6657-c6mhs" podUID="82c589d7-8e06-40e1-9092-d63e99022767" containerName="dnsmasq-dns" containerID="cri-o://7eb6cb8c5f2dd427ce401ff1158cce882c28c222099eea04b96cc1e6ed03b9bb" gracePeriod=10 Dec 10 13:16:49 crc kubenswrapper[4921]: I1210 13:16:49.381187 4921 generic.go:334] "Generic (PLEG): container finished" podID="82c589d7-8e06-40e1-9092-d63e99022767" containerID="7eb6cb8c5f2dd427ce401ff1158cce882c28c222099eea04b96cc1e6ed03b9bb" exitCode=0 Dec 10 13:16:49 crc kubenswrapper[4921]: I1210 13:16:49.381633 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8b8cf6657-c6mhs" event={"ID":"82c589d7-8e06-40e1-9092-d63e99022767","Type":"ContainerDied","Data":"7eb6cb8c5f2dd427ce401ff1158cce882c28c222099eea04b96cc1e6ed03b9bb"} Dec 10 13:16:49 crc kubenswrapper[4921]: I1210 13:16:49.395715 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ded7f5ee-1c28-48c4-bd62-5fa8f40608ef","Type":"ContainerStarted","Data":"b7d7c6a938f447ebd96963a7a3a89b82bde6b78a2b8cee694defbf8842a82395"} Dec 10 13:16:49 crc kubenswrapper[4921]: I1210 13:16:49.455292 4921 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-8b8cf6657-c6mhs" Dec 10 13:16:49 crc kubenswrapper[4921]: I1210 13:16:49.531509 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/82c589d7-8e06-40e1-9092-d63e99022767-ovsdbserver-nb\") pod \"82c589d7-8e06-40e1-9092-d63e99022767\" (UID: \"82c589d7-8e06-40e1-9092-d63e99022767\") " Dec 10 13:16:49 crc kubenswrapper[4921]: I1210 13:16:49.531547 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/82c589d7-8e06-40e1-9092-d63e99022767-ovsdbserver-sb\") pod \"82c589d7-8e06-40e1-9092-d63e99022767\" (UID: \"82c589d7-8e06-40e1-9092-d63e99022767\") " Dec 10 13:16:49 crc kubenswrapper[4921]: I1210 13:16:49.531576 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5wqjs\" (UniqueName: \"kubernetes.io/projected/82c589d7-8e06-40e1-9092-d63e99022767-kube-api-access-5wqjs\") pod \"82c589d7-8e06-40e1-9092-d63e99022767\" (UID: \"82c589d7-8e06-40e1-9092-d63e99022767\") " Dec 10 13:16:49 crc kubenswrapper[4921]: I1210 13:16:49.531969 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/82c589d7-8e06-40e1-9092-d63e99022767-config\") pod \"82c589d7-8e06-40e1-9092-d63e99022767\" (UID: \"82c589d7-8e06-40e1-9092-d63e99022767\") " Dec 10 13:16:49 crc kubenswrapper[4921]: I1210 13:16:49.532016 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/82c589d7-8e06-40e1-9092-d63e99022767-dns-svc\") pod \"82c589d7-8e06-40e1-9092-d63e99022767\" (UID: \"82c589d7-8e06-40e1-9092-d63e99022767\") " Dec 10 13:16:49 crc kubenswrapper[4921]: I1210 13:16:49.584611 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/82c589d7-8e06-40e1-9092-d63e99022767-kube-api-access-5wqjs" (OuterVolumeSpecName: "kube-api-access-5wqjs") pod "82c589d7-8e06-40e1-9092-d63e99022767" (UID: "82c589d7-8e06-40e1-9092-d63e99022767"). InnerVolumeSpecName "kube-api-access-5wqjs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 13:16:49 crc kubenswrapper[4921]: I1210 13:16:49.634109 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/82c589d7-8e06-40e1-9092-d63e99022767-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "82c589d7-8e06-40e1-9092-d63e99022767" (UID: "82c589d7-8e06-40e1-9092-d63e99022767"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 13:16:49 crc kubenswrapper[4921]: I1210 13:16:49.634763 4921 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/82c589d7-8e06-40e1-9092-d63e99022767-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 10 13:16:49 crc kubenswrapper[4921]: I1210 13:16:49.634790 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5wqjs\" (UniqueName: \"kubernetes.io/projected/82c589d7-8e06-40e1-9092-d63e99022767-kube-api-access-5wqjs\") on node \"crc\" DevicePath \"\"" Dec 10 13:16:49 crc kubenswrapper[4921]: I1210 13:16:49.649775 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/82c589d7-8e06-40e1-9092-d63e99022767-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "82c589d7-8e06-40e1-9092-d63e99022767" (UID: "82c589d7-8e06-40e1-9092-d63e99022767"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 13:16:49 crc kubenswrapper[4921]: I1210 13:16:49.657023 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/82c589d7-8e06-40e1-9092-d63e99022767-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "82c589d7-8e06-40e1-9092-d63e99022767" (UID: "82c589d7-8e06-40e1-9092-d63e99022767"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 13:16:49 crc kubenswrapper[4921]: I1210 13:16:49.674973 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/82c589d7-8e06-40e1-9092-d63e99022767-config" (OuterVolumeSpecName: "config") pod "82c589d7-8e06-40e1-9092-d63e99022767" (UID: "82c589d7-8e06-40e1-9092-d63e99022767"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 13:16:49 crc kubenswrapper[4921]: I1210 13:16:49.738100 4921 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/82c589d7-8e06-40e1-9092-d63e99022767-config\") on node \"crc\" DevicePath \"\"" Dec 10 13:16:49 crc kubenswrapper[4921]: I1210 13:16:49.738136 4921 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/82c589d7-8e06-40e1-9092-d63e99022767-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 10 13:16:49 crc kubenswrapper[4921]: I1210 13:16:49.738144 4921 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/82c589d7-8e06-40e1-9092-d63e99022767-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 10 13:16:50 crc kubenswrapper[4921]: I1210 13:16:50.422925 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8b8cf6657-c6mhs" event={"ID":"82c589d7-8e06-40e1-9092-d63e99022767","Type":"ContainerDied","Data":"88d7d0c7cb11e8267b54395fcf6f6868b7e54a78cba9b040d61ee5d181a05b1f"} Dec 10 13:16:50 crc kubenswrapper[4921]: I1210 13:16:50.423009 4921 scope.go:117] "RemoveContainer" containerID="7eb6cb8c5f2dd427ce401ff1158cce882c28c222099eea04b96cc1e6ed03b9bb" Dec 10 13:16:50 crc kubenswrapper[4921]: I1210 13:16:50.423298 4921 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-8b8cf6657-c6mhs" Dec 10 13:16:50 crc kubenswrapper[4921]: I1210 13:16:50.481104 4921 scope.go:117] "RemoveContainer" containerID="69ca2c0fa0b7501664deec4c5c0dea103a84d18b98aef08521c8c2c0898fc1b0" Dec 10 13:16:50 crc kubenswrapper[4921]: I1210 13:16:50.506958 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8b8cf6657-c6mhs"] Dec 10 13:16:50 crc kubenswrapper[4921]: I1210 13:16:50.539452 4921 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-8b8cf6657-c6mhs"] Dec 10 13:16:51 crc kubenswrapper[4921]: I1210 13:16:51.201576 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="82c589d7-8e06-40e1-9092-d63e99022767" path="/var/lib/kubelet/pods/82c589d7-8e06-40e1-9092-d63e99022767/volumes" Dec 10 13:16:51 crc kubenswrapper[4921]: I1210 13:16:51.436966 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ded7f5ee-1c28-48c4-bd62-5fa8f40608ef","Type":"ContainerStarted","Data":"02c4fb6a851028ccd603083a016164ffe37021e1b822a2949c0ef8c191273c38"} Dec 10 13:16:51 crc kubenswrapper[4921]: I1210 13:16:51.437109 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 10 13:16:51 crc kubenswrapper[4921]: I1210 13:16:51.469002 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.975483436 podStartE2EDuration="6.468980719s" podCreationTimestamp="2025-12-10 13:16:45 +0000 UTC" firstStartedPulling="2025-12-10 13:16:46.221985153 +0000 UTC m=+1203.438207077" lastFinishedPulling="2025-12-10 13:16:50.715482446 +0000 UTC m=+1207.931704360" observedRunningTime="2025-12-10 13:16:51.458032545 +0000 UTC m=+1208.674254479" watchObservedRunningTime="2025-12-10 13:16:51.468980719 +0000 UTC m=+1208.685202643" Dec 10 13:16:53 crc kubenswrapper[4921]: I1210 13:16:53.459180 4921 generic.go:334] "Generic (PLEG): container finished" podID="b4fa87ce-fd97-46de-b9d2-3f1f544e6ebd" containerID="eb90798c4a10f8296339c8c963a4a07c8ea2bd08b1a4a6006dd9bdd38041087f" exitCode=0 Dec 10 13:16:53 crc kubenswrapper[4921]: I1210 13:16:53.459464 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-dx9x6" event={"ID":"b4fa87ce-fd97-46de-b9d2-3f1f544e6ebd","Type":"ContainerDied","Data":"eb90798c4a10f8296339c8c963a4a07c8ea2bd08b1a4a6006dd9bdd38041087f"} Dec 10 13:16:54 crc kubenswrapper[4921]: I1210 13:16:54.174135 4921 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-8b8cf6657-c6mhs" podUID="82c589d7-8e06-40e1-9092-d63e99022767" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.172:5353: i/o timeout" Dec 10 13:16:54 crc kubenswrapper[4921]: I1210 13:16:54.813592 4921 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-dx9x6" Dec 10 13:16:54 crc kubenswrapper[4921]: I1210 13:16:54.933442 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mplkc\" (UniqueName: \"kubernetes.io/projected/b4fa87ce-fd97-46de-b9d2-3f1f544e6ebd-kube-api-access-mplkc\") pod \"b4fa87ce-fd97-46de-b9d2-3f1f544e6ebd\" (UID: \"b4fa87ce-fd97-46de-b9d2-3f1f544e6ebd\") " Dec 10 13:16:54 crc kubenswrapper[4921]: I1210 13:16:54.933592 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b4fa87ce-fd97-46de-b9d2-3f1f544e6ebd-config-data\") pod \"b4fa87ce-fd97-46de-b9d2-3f1f544e6ebd\" (UID: \"b4fa87ce-fd97-46de-b9d2-3f1f544e6ebd\") " Dec 10 13:16:54 crc kubenswrapper[4921]: I1210 13:16:54.933623 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b4fa87ce-fd97-46de-b9d2-3f1f544e6ebd-scripts\") pod \"b4fa87ce-fd97-46de-b9d2-3f1f544e6ebd\" (UID: \"b4fa87ce-fd97-46de-b9d2-3f1f544e6ebd\") " Dec 10 13:16:54 crc kubenswrapper[4921]: I1210 13:16:54.933643 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4fa87ce-fd97-46de-b9d2-3f1f544e6ebd-combined-ca-bundle\") pod \"b4fa87ce-fd97-46de-b9d2-3f1f544e6ebd\" (UID: \"b4fa87ce-fd97-46de-b9d2-3f1f544e6ebd\") " Dec 10 13:16:54 crc kubenswrapper[4921]: I1210 13:16:54.939347 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b4fa87ce-fd97-46de-b9d2-3f1f544e6ebd-scripts" (OuterVolumeSpecName: "scripts") pod "b4fa87ce-fd97-46de-b9d2-3f1f544e6ebd" (UID: "b4fa87ce-fd97-46de-b9d2-3f1f544e6ebd"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:16:54 crc kubenswrapper[4921]: I1210 13:16:54.939438 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b4fa87ce-fd97-46de-b9d2-3f1f544e6ebd-kube-api-access-mplkc" (OuterVolumeSpecName: "kube-api-access-mplkc") pod "b4fa87ce-fd97-46de-b9d2-3f1f544e6ebd" (UID: "b4fa87ce-fd97-46de-b9d2-3f1f544e6ebd"). InnerVolumeSpecName "kube-api-access-mplkc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 13:16:54 crc kubenswrapper[4921]: I1210 13:16:54.966227 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b4fa87ce-fd97-46de-b9d2-3f1f544e6ebd-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b4fa87ce-fd97-46de-b9d2-3f1f544e6ebd" (UID: "b4fa87ce-fd97-46de-b9d2-3f1f544e6ebd"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:16:54 crc kubenswrapper[4921]: I1210 13:16:54.979187 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b4fa87ce-fd97-46de-b9d2-3f1f544e6ebd-config-data" (OuterVolumeSpecName: "config-data") pod "b4fa87ce-fd97-46de-b9d2-3f1f544e6ebd" (UID: "b4fa87ce-fd97-46de-b9d2-3f1f544e6ebd"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:16:55 crc kubenswrapper[4921]: I1210 13:16:55.039630 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mplkc\" (UniqueName: \"kubernetes.io/projected/b4fa87ce-fd97-46de-b9d2-3f1f544e6ebd-kube-api-access-mplkc\") on node \"crc\" DevicePath \"\"" Dec 10 13:16:55 crc kubenswrapper[4921]: I1210 13:16:55.039666 4921 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b4fa87ce-fd97-46de-b9d2-3f1f544e6ebd-config-data\") on node \"crc\" DevicePath \"\"" Dec 10 13:16:55 crc kubenswrapper[4921]: I1210 13:16:55.039675 4921 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b4fa87ce-fd97-46de-b9d2-3f1f544e6ebd-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 13:16:55 crc kubenswrapper[4921]: I1210 13:16:55.039683 4921 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4fa87ce-fd97-46de-b9d2-3f1f544e6ebd-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 13:16:55 crc kubenswrapper[4921]: I1210 13:16:55.484624 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-dx9x6" event={"ID":"b4fa87ce-fd97-46de-b9d2-3f1f544e6ebd","Type":"ContainerDied","Data":"facbe65a17b657a3be8c2945efd4aa7fdf2e0f60bb72bf51a75b96a77ff02b31"} Dec 10 13:16:55 crc kubenswrapper[4921]: I1210 13:16:55.484952 4921 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="facbe65a17b657a3be8c2945efd4aa7fdf2e0f60bb72bf51a75b96a77ff02b31" Dec 10 13:16:55 crc kubenswrapper[4921]: I1210 13:16:55.485041 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-dx9x6" Dec 10 13:16:55 crc kubenswrapper[4921]: I1210 13:16:55.662660 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 10 13:16:55 crc kubenswrapper[4921]: I1210 13:16:55.662915 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="abcdde0d-505c-4cc7-9805-09b2fb3e562c" containerName="nova-api-log" containerID="cri-o://712ab96d0e5a8a7b62fe495759c83a77161905eb3dd495f54a2326e27c7a7b37" gracePeriod=30 Dec 10 13:16:55 crc kubenswrapper[4921]: I1210 13:16:55.663001 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="abcdde0d-505c-4cc7-9805-09b2fb3e562c" containerName="nova-api-api" containerID="cri-o://09f3e93d6f5f4c2cd2b9d1dbcad906f238b66627431a76f79cc46d8967d0b090" gracePeriod=30 Dec 10 13:16:55 crc kubenswrapper[4921]: I1210 13:16:55.691224 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 10 13:16:55 crc kubenswrapper[4921]: I1210 13:16:55.691523 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="0d48014d-698e-458a-98fe-40dc5e6fa3ab" containerName="nova-scheduler-scheduler" containerID="cri-o://9da02d3a46a6b343cb8dde058ca6aa75e481651107cab320760c66e78447d909" gracePeriod=30 Dec 10 13:16:55 crc kubenswrapper[4921]: I1210 13:16:55.722367 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 10 13:16:55 crc kubenswrapper[4921]: I1210 13:16:55.723538 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="132528a7-80b2-4c2b-89aa-9ab13bd8741a" 
containerName="nova-metadata-metadata" containerID="cri-o://4977baddfd7bfc59078ee2e3e81fcedf383c27532d01418ad6dbf4d447a975ad" gracePeriod=30 Dec 10 13:16:55 crc kubenswrapper[4921]: I1210 13:16:55.723489 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="132528a7-80b2-4c2b-89aa-9ab13bd8741a" containerName="nova-metadata-log" containerID="cri-o://fd0a6c2e3f05482d03664d7bcd66273ee0229b50f8d78e43e87c6d5fd306fcff" gracePeriod=30 Dec 10 13:16:56 crc kubenswrapper[4921]: I1210 13:16:56.494366 4921 generic.go:334] "Generic (PLEG): container finished" podID="132528a7-80b2-4c2b-89aa-9ab13bd8741a" containerID="fd0a6c2e3f05482d03664d7bcd66273ee0229b50f8d78e43e87c6d5fd306fcff" exitCode=143 Dec 10 13:16:56 crc kubenswrapper[4921]: I1210 13:16:56.494586 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"132528a7-80b2-4c2b-89aa-9ab13bd8741a","Type":"ContainerDied","Data":"fd0a6c2e3f05482d03664d7bcd66273ee0229b50f8d78e43e87c6d5fd306fcff"} Dec 10 13:16:56 crc kubenswrapper[4921]: I1210 13:16:56.496686 4921 generic.go:334] "Generic (PLEG): container finished" podID="abcdde0d-505c-4cc7-9805-09b2fb3e562c" containerID="09f3e93d6f5f4c2cd2b9d1dbcad906f238b66627431a76f79cc46d8967d0b090" exitCode=0 Dec 10 13:16:56 crc kubenswrapper[4921]: I1210 13:16:56.496717 4921 generic.go:334] "Generic (PLEG): container finished" podID="abcdde0d-505c-4cc7-9805-09b2fb3e562c" containerID="712ab96d0e5a8a7b62fe495759c83a77161905eb3dd495f54a2326e27c7a7b37" exitCode=143 Dec 10 13:16:56 crc kubenswrapper[4921]: I1210 13:16:56.496721 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"abcdde0d-505c-4cc7-9805-09b2fb3e562c","Type":"ContainerDied","Data":"09f3e93d6f5f4c2cd2b9d1dbcad906f238b66627431a76f79cc46d8967d0b090"} Dec 10 13:16:56 crc kubenswrapper[4921]: I1210 13:16:56.496775 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"abcdde0d-505c-4cc7-9805-09b2fb3e562c","Type":"ContainerDied","Data":"712ab96d0e5a8a7b62fe495759c83a77161905eb3dd495f54a2326e27c7a7b37"} Dec 10 13:16:56 crc kubenswrapper[4921]: I1210 13:16:56.496791 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"abcdde0d-505c-4cc7-9805-09b2fb3e562c","Type":"ContainerDied","Data":"e4de30c86852f3031579aa22a112d59cff2036842712ef62206ed9c611cf87f8"} Dec 10 13:16:56 crc kubenswrapper[4921]: I1210 13:16:56.496802 4921 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e4de30c86852f3031579aa22a112d59cff2036842712ef62206ed9c611cf87f8" Dec 10 13:16:56 crc kubenswrapper[4921]: I1210 13:16:56.555594 4921 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 10 13:16:56 crc kubenswrapper[4921]: I1210 13:16:56.692890 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/abcdde0d-505c-4cc7-9805-09b2fb3e562c-internal-tls-certs\") pod \"abcdde0d-505c-4cc7-9805-09b2fb3e562c\" (UID: \"abcdde0d-505c-4cc7-9805-09b2fb3e562c\") " Dec 10 13:16:56 crc kubenswrapper[4921]: I1210 13:16:56.692980 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/abcdde0d-505c-4cc7-9805-09b2fb3e562c-config-data\") pod \"abcdde0d-505c-4cc7-9805-09b2fb3e562c\" (UID: \"abcdde0d-505c-4cc7-9805-09b2fb3e562c\") " Dec 10 13:16:56 crc kubenswrapper[4921]: I1210 13:16:56.693061 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/abcdde0d-505c-4cc7-9805-09b2fb3e562c-combined-ca-bundle\") pod \"abcdde0d-505c-4cc7-9805-09b2fb3e562c\" (UID: \"abcdde0d-505c-4cc7-9805-09b2fb3e562c\") " Dec 10 13:16:56 crc kubenswrapper[4921]: I1210 13:16:56.693099 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/abcdde0d-505c-4cc7-9805-09b2fb3e562c-public-tls-certs\") pod \"abcdde0d-505c-4cc7-9805-09b2fb3e562c\" (UID: \"abcdde0d-505c-4cc7-9805-09b2fb3e562c\") " Dec 10 13:16:56 crc kubenswrapper[4921]: I1210 13:16:56.693153 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6dcm\" (UniqueName: \"kubernetes.io/projected/abcdde0d-505c-4cc7-9805-09b2fb3e562c-kube-api-access-d6dcm\") pod \"abcdde0d-505c-4cc7-9805-09b2fb3e562c\" (UID: \"abcdde0d-505c-4cc7-9805-09b2fb3e562c\") " Dec 10 13:16:56 crc kubenswrapper[4921]: I1210 13:16:56.693195 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/abcdde0d-505c-4cc7-9805-09b2fb3e562c-logs\") pod \"abcdde0d-505c-4cc7-9805-09b2fb3e562c\" (UID: \"abcdde0d-505c-4cc7-9805-09b2fb3e562c\") " Dec 10 13:16:56 crc kubenswrapper[4921]: I1210 13:16:56.693919 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/abcdde0d-505c-4cc7-9805-09b2fb3e562c-logs" (OuterVolumeSpecName: "logs") pod "abcdde0d-505c-4cc7-9805-09b2fb3e562c" (UID: "abcdde0d-505c-4cc7-9805-09b2fb3e562c"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 13:16:56 crc kubenswrapper[4921]: I1210 13:16:56.725846 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/abcdde0d-505c-4cc7-9805-09b2fb3e562c-kube-api-access-d6dcm" (OuterVolumeSpecName: "kube-api-access-d6dcm") pod "abcdde0d-505c-4cc7-9805-09b2fb3e562c" (UID: "abcdde0d-505c-4cc7-9805-09b2fb3e562c"). InnerVolumeSpecName "kube-api-access-d6dcm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 13:16:56 crc kubenswrapper[4921]: I1210 13:16:56.780672 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/abcdde0d-505c-4cc7-9805-09b2fb3e562c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "abcdde0d-505c-4cc7-9805-09b2fb3e562c" (UID: "abcdde0d-505c-4cc7-9805-09b2fb3e562c"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:16:56 crc kubenswrapper[4921]: I1210 13:16:56.794875 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6dcm\" (UniqueName: \"kubernetes.io/projected/abcdde0d-505c-4cc7-9805-09b2fb3e562c-kube-api-access-d6dcm\") on node \"crc\" DevicePath \"\"" Dec 10 13:16:56 crc kubenswrapper[4921]: I1210 13:16:56.794906 4921 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/abcdde0d-505c-4cc7-9805-09b2fb3e562c-logs\") on node \"crc\" DevicePath \"\"" Dec 10 13:16:56 crc kubenswrapper[4921]: I1210 13:16:56.794916 4921 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/abcdde0d-505c-4cc7-9805-09b2fb3e562c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 13:16:56 crc kubenswrapper[4921]: I1210 13:16:56.816611 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/abcdde0d-505c-4cc7-9805-09b2fb3e562c-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "abcdde0d-505c-4cc7-9805-09b2fb3e562c" (UID: "abcdde0d-505c-4cc7-9805-09b2fb3e562c"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:16:56 crc kubenswrapper[4921]: I1210 13:16:56.826513 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/abcdde0d-505c-4cc7-9805-09b2fb3e562c-config-data" (OuterVolumeSpecName: "config-data") pod "abcdde0d-505c-4cc7-9805-09b2fb3e562c" (UID: "abcdde0d-505c-4cc7-9805-09b2fb3e562c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:16:56 crc kubenswrapper[4921]: I1210 13:16:56.878623 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/abcdde0d-505c-4cc7-9805-09b2fb3e562c-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "abcdde0d-505c-4cc7-9805-09b2fb3e562c" (UID: "abcdde0d-505c-4cc7-9805-09b2fb3e562c"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:16:56 crc kubenswrapper[4921]: I1210 13:16:56.896797 4921 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/abcdde0d-505c-4cc7-9805-09b2fb3e562c-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 10 13:16:56 crc kubenswrapper[4921]: I1210 13:16:56.897009 4921 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/abcdde0d-505c-4cc7-9805-09b2fb3e562c-config-data\") on node \"crc\" DevicePath \"\"" Dec 10 13:16:56 crc kubenswrapper[4921]: I1210 13:16:56.897066 4921 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/abcdde0d-505c-4cc7-9805-09b2fb3e562c-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 10 13:16:57 crc kubenswrapper[4921]: E1210 13:16:57.310202 4921 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="9da02d3a46a6b343cb8dde058ca6aa75e481651107cab320760c66e78447d909" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 10 13:16:57 crc kubenswrapper[4921]: E1210 13:16:57.312026 4921 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="9da02d3a46a6b343cb8dde058ca6aa75e481651107cab320760c66e78447d909" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 10 13:16:57 crc kubenswrapper[4921]: E1210 13:16:57.313325 4921 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="9da02d3a46a6b343cb8dde058ca6aa75e481651107cab320760c66e78447d909" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 10 13:16:57 crc kubenswrapper[4921]: E1210 13:16:57.313421 4921 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="0d48014d-698e-458a-98fe-40dc5e6fa3ab" containerName="nova-scheduler-scheduler" Dec 10 13:16:57 crc kubenswrapper[4921]: I1210 13:16:57.508093 4921 generic.go:334] "Generic (PLEG): container finished" podID="0d48014d-698e-458a-98fe-40dc5e6fa3ab" containerID="9da02d3a46a6b343cb8dde058ca6aa75e481651107cab320760c66e78447d909" exitCode=0 Dec 10 13:16:57 crc kubenswrapper[4921]: I1210 13:16:57.508173 4921 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 10 13:16:57 crc kubenswrapper[4921]: I1210 13:16:57.508179 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"0d48014d-698e-458a-98fe-40dc5e6fa3ab","Type":"ContainerDied","Data":"9da02d3a46a6b343cb8dde058ca6aa75e481651107cab320760c66e78447d909"} Dec 10 13:16:57 crc kubenswrapper[4921]: I1210 13:16:57.536224 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 10 13:16:57 crc kubenswrapper[4921]: I1210 13:16:57.551043 4921 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 10 13:16:57 crc kubenswrapper[4921]: I1210 13:16:57.592322 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 10 13:16:57 crc kubenswrapper[4921]: E1210 13:16:57.592826 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="82c589d7-8e06-40e1-9092-d63e99022767" containerName="init" Dec 10 13:16:57 crc kubenswrapper[4921]: I1210 13:16:57.592848 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="82c589d7-8e06-40e1-9092-d63e99022767" containerName="init" Dec 10 13:16:57 crc kubenswrapper[4921]: E1210 13:16:57.592857 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b4fa87ce-fd97-46de-b9d2-3f1f544e6ebd" containerName="nova-manage" Dec 10 13:16:57 crc kubenswrapper[4921]: I1210 13:16:57.592865 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="b4fa87ce-fd97-46de-b9d2-3f1f544e6ebd" containerName="nova-manage" Dec 10 13:16:57 crc kubenswrapper[4921]: E1210 13:16:57.592913 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="82c589d7-8e06-40e1-9092-d63e99022767" containerName="dnsmasq-dns" Dec 10 13:16:57 crc kubenswrapper[4921]: I1210 13:16:57.592924 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="82c589d7-8e06-40e1-9092-d63e99022767" containerName="dnsmasq-dns" Dec 10 13:16:57 crc kubenswrapper[4921]: E1210 13:16:57.592940 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="abcdde0d-505c-4cc7-9805-09b2fb3e562c" containerName="nova-api-api" Dec 10 13:16:57 crc kubenswrapper[4921]: I1210 13:16:57.592947 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="abcdde0d-505c-4cc7-9805-09b2fb3e562c" containerName="nova-api-api" Dec 10 13:16:57 crc kubenswrapper[4921]: E1210 13:16:57.592964 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="abcdde0d-505c-4cc7-9805-09b2fb3e562c" containerName="nova-api-log" Dec 10 13:16:57 crc kubenswrapper[4921]: I1210 13:16:57.592970 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="abcdde0d-505c-4cc7-9805-09b2fb3e562c" containerName="nova-api-log" Dec 10 13:16:57 crc kubenswrapper[4921]: I1210 13:16:57.593153 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="abcdde0d-505c-4cc7-9805-09b2fb3e562c" containerName="nova-api-api" Dec 10 13:16:57 crc kubenswrapper[4921]: I1210 13:16:57.593165 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="b4fa87ce-fd97-46de-b9d2-3f1f544e6ebd" containerName="nova-manage" Dec 10 13:16:57 crc kubenswrapper[4921]: I1210 13:16:57.593181 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="abcdde0d-505c-4cc7-9805-09b2fb3e562c" containerName="nova-api-log" Dec 10 13:16:57 crc kubenswrapper[4921]: I1210 13:16:57.593190 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="82c589d7-8e06-40e1-9092-d63e99022767" containerName="dnsmasq-dns" Dec 10 13:16:57 crc 
kubenswrapper[4921]: I1210 13:16:57.594096 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 10 13:16:57 crc kubenswrapper[4921]: I1210 13:16:57.596292 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Dec 10 13:16:57 crc kubenswrapper[4921]: I1210 13:16:57.597756 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Dec 10 13:16:57 crc kubenswrapper[4921]: I1210 13:16:57.597929 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 10 13:16:57 crc kubenswrapper[4921]: I1210 13:16:57.607711 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 10 13:16:57 crc kubenswrapper[4921]: I1210 13:16:57.709786 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cf045e82-19ac-4799-a0aa-6ca5cb6cee04-internal-tls-certs\") pod \"nova-api-0\" (UID: \"cf045e82-19ac-4799-a0aa-6ca5cb6cee04\") " pod="openstack/nova-api-0" Dec 10 13:16:57 crc kubenswrapper[4921]: I1210 13:16:57.709845 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cf045e82-19ac-4799-a0aa-6ca5cb6cee04-logs\") pod \"nova-api-0\" (UID: \"cf045e82-19ac-4799-a0aa-6ca5cb6cee04\") " pod="openstack/nova-api-0" Dec 10 13:16:57 crc kubenswrapper[4921]: I1210 13:16:57.709882 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7tctx\" (UniqueName: \"kubernetes.io/projected/cf045e82-19ac-4799-a0aa-6ca5cb6cee04-kube-api-access-7tctx\") pod \"nova-api-0\" (UID: \"cf045e82-19ac-4799-a0aa-6ca5cb6cee04\") " pod="openstack/nova-api-0" Dec 10 13:16:57 crc kubenswrapper[4921]: I1210 13:16:57.709918 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf045e82-19ac-4799-a0aa-6ca5cb6cee04-config-data\") pod \"nova-api-0\" (UID: \"cf045e82-19ac-4799-a0aa-6ca5cb6cee04\") " pod="openstack/nova-api-0" Dec 10 13:16:57 crc kubenswrapper[4921]: I1210 13:16:57.709947 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf045e82-19ac-4799-a0aa-6ca5cb6cee04-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"cf045e82-19ac-4799-a0aa-6ca5cb6cee04\") " pod="openstack/nova-api-0" Dec 10 13:16:57 crc kubenswrapper[4921]: I1210 13:16:57.710020 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cf045e82-19ac-4799-a0aa-6ca5cb6cee04-public-tls-certs\") pod \"nova-api-0\" (UID: \"cf045e82-19ac-4799-a0aa-6ca5cb6cee04\") " pod="openstack/nova-api-0" Dec 10 13:16:57 crc kubenswrapper[4921]: I1210 13:16:57.710049 4921 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 10 13:16:57 crc kubenswrapper[4921]: I1210 13:16:57.811278 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d48014d-698e-458a-98fe-40dc5e6fa3ab-combined-ca-bundle\") pod \"0d48014d-698e-458a-98fe-40dc5e6fa3ab\" (UID: \"0d48014d-698e-458a-98fe-40dc5e6fa3ab\") " Dec 10 13:16:57 crc kubenswrapper[4921]: I1210 13:16:57.811382 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gptnd\" (UniqueName: \"kubernetes.io/projected/0d48014d-698e-458a-98fe-40dc5e6fa3ab-kube-api-access-gptnd\") pod \"0d48014d-698e-458a-98fe-40dc5e6fa3ab\" (UID: \"0d48014d-698e-458a-98fe-40dc5e6fa3ab\") " Dec 10 13:16:57 crc kubenswrapper[4921]: I1210 13:16:57.811747 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d48014d-698e-458a-98fe-40dc5e6fa3ab-config-data\") pod \"0d48014d-698e-458a-98fe-40dc5e6fa3ab\" (UID: \"0d48014d-698e-458a-98fe-40dc5e6fa3ab\") " Dec 10 13:16:57 crc kubenswrapper[4921]: I1210 13:16:57.812052 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cf045e82-19ac-4799-a0aa-6ca5cb6cee04-public-tls-certs\") pod \"nova-api-0\" (UID: \"cf045e82-19ac-4799-a0aa-6ca5cb6cee04\") " pod="openstack/nova-api-0" Dec 10 13:16:57 crc kubenswrapper[4921]: I1210 13:16:57.812207 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cf045e82-19ac-4799-a0aa-6ca5cb6cee04-internal-tls-certs\") pod \"nova-api-0\" (UID: \"cf045e82-19ac-4799-a0aa-6ca5cb6cee04\") " pod="openstack/nova-api-0" Dec 10 13:16:57 crc kubenswrapper[4921]: I1210 13:16:57.812243 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cf045e82-19ac-4799-a0aa-6ca5cb6cee04-logs\") pod \"nova-api-0\" (UID: \"cf045e82-19ac-4799-a0aa-6ca5cb6cee04\") " pod="openstack/nova-api-0" Dec 10 13:16:57 crc kubenswrapper[4921]: I1210 13:16:57.812277 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7tctx\" (UniqueName: \"kubernetes.io/projected/cf045e82-19ac-4799-a0aa-6ca5cb6cee04-kube-api-access-7tctx\") pod \"nova-api-0\" (UID: \"cf045e82-19ac-4799-a0aa-6ca5cb6cee04\") " pod="openstack/nova-api-0" Dec 10 13:16:57 crc kubenswrapper[4921]: I1210 13:16:57.812312 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf045e82-19ac-4799-a0aa-6ca5cb6cee04-config-data\") pod \"nova-api-0\" (UID: \"cf045e82-19ac-4799-a0aa-6ca5cb6cee04\") " pod="openstack/nova-api-0" Dec 10 13:16:57 crc kubenswrapper[4921]: I1210 13:16:57.812341 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf045e82-19ac-4799-a0aa-6ca5cb6cee04-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"cf045e82-19ac-4799-a0aa-6ca5cb6cee04\") " pod="openstack/nova-api-0" Dec 10 13:16:57 crc kubenswrapper[4921]: I1210 13:16:57.813660 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cf045e82-19ac-4799-a0aa-6ca5cb6cee04-logs\") pod \"nova-api-0\" (UID: \"cf045e82-19ac-4799-a0aa-6ca5cb6cee04\") " 
pod="openstack/nova-api-0" Dec 10 13:16:57 crc kubenswrapper[4921]: I1210 13:16:57.818038 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0d48014d-698e-458a-98fe-40dc5e6fa3ab-kube-api-access-gptnd" (OuterVolumeSpecName: "kube-api-access-gptnd") pod "0d48014d-698e-458a-98fe-40dc5e6fa3ab" (UID: "0d48014d-698e-458a-98fe-40dc5e6fa3ab"). InnerVolumeSpecName "kube-api-access-gptnd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 13:16:57 crc kubenswrapper[4921]: I1210 13:16:57.818546 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cf045e82-19ac-4799-a0aa-6ca5cb6cee04-internal-tls-certs\") pod \"nova-api-0\" (UID: \"cf045e82-19ac-4799-a0aa-6ca5cb6cee04\") " pod="openstack/nova-api-0" Dec 10 13:16:57 crc kubenswrapper[4921]: I1210 13:16:57.818639 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf045e82-19ac-4799-a0aa-6ca5cb6cee04-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"cf045e82-19ac-4799-a0aa-6ca5cb6cee04\") " pod="openstack/nova-api-0" Dec 10 13:16:57 crc kubenswrapper[4921]: I1210 13:16:57.820054 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cf045e82-19ac-4799-a0aa-6ca5cb6cee04-public-tls-certs\") pod \"nova-api-0\" (UID: \"cf045e82-19ac-4799-a0aa-6ca5cb6cee04\") " pod="openstack/nova-api-0" Dec 10 13:16:57 crc kubenswrapper[4921]: I1210 13:16:57.821025 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf045e82-19ac-4799-a0aa-6ca5cb6cee04-config-data\") pod \"nova-api-0\" (UID: \"cf045e82-19ac-4799-a0aa-6ca5cb6cee04\") " pod="openstack/nova-api-0" Dec 10 13:16:57 crc kubenswrapper[4921]: I1210 13:16:57.834929 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7tctx\" (UniqueName: \"kubernetes.io/projected/cf045e82-19ac-4799-a0aa-6ca5cb6cee04-kube-api-access-7tctx\") pod \"nova-api-0\" (UID: \"cf045e82-19ac-4799-a0aa-6ca5cb6cee04\") " pod="openstack/nova-api-0" Dec 10 13:16:57 crc kubenswrapper[4921]: I1210 13:16:57.857110 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0d48014d-698e-458a-98fe-40dc5e6fa3ab-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0d48014d-698e-458a-98fe-40dc5e6fa3ab" (UID: "0d48014d-698e-458a-98fe-40dc5e6fa3ab"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:16:57 crc kubenswrapper[4921]: I1210 13:16:57.878111 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0d48014d-698e-458a-98fe-40dc5e6fa3ab-config-data" (OuterVolumeSpecName: "config-data") pod "0d48014d-698e-458a-98fe-40dc5e6fa3ab" (UID: "0d48014d-698e-458a-98fe-40dc5e6fa3ab"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:16:57 crc kubenswrapper[4921]: I1210 13:16:57.914167 4921 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d48014d-698e-458a-98fe-40dc5e6fa3ab-config-data\") on node \"crc\" DevicePath \"\"" Dec 10 13:16:57 crc kubenswrapper[4921]: I1210 13:16:57.914199 4921 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d48014d-698e-458a-98fe-40dc5e6fa3ab-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 13:16:57 crc kubenswrapper[4921]: I1210 13:16:57.914211 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gptnd\" (UniqueName: \"kubernetes.io/projected/0d48014d-698e-458a-98fe-40dc5e6fa3ab-kube-api-access-gptnd\") on node \"crc\" DevicePath \"\"" Dec 10 13:16:57 crc kubenswrapper[4921]: I1210 13:16:57.926745 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 10 13:16:58 crc kubenswrapper[4921]: I1210 13:16:58.466142 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 10 13:16:58 crc kubenswrapper[4921]: W1210 13:16:58.466547 4921 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcf045e82_19ac_4799_a0aa_6ca5cb6cee04.slice/crio-1f58827d0c74c3c218fc3a6d2639b226a5d4c2173e12e5c326781701a037fddc WatchSource:0}: Error finding container 1f58827d0c74c3c218fc3a6d2639b226a5d4c2173e12e5c326781701a037fddc: Status 404 returned error can't find the container with id 1f58827d0c74c3c218fc3a6d2639b226a5d4c2173e12e5c326781701a037fddc Dec 10 13:16:58 crc kubenswrapper[4921]: I1210 13:16:58.518576 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"cf045e82-19ac-4799-a0aa-6ca5cb6cee04","Type":"ContainerStarted","Data":"1f58827d0c74c3c218fc3a6d2639b226a5d4c2173e12e5c326781701a037fddc"} Dec 10 13:16:58 crc kubenswrapper[4921]: I1210 13:16:58.523063 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"0d48014d-698e-458a-98fe-40dc5e6fa3ab","Type":"ContainerDied","Data":"e1d06bb3e9e6e5751cd33835ebfa5352938bc292c8fd3a92ef2454296253875b"} Dec 10 13:16:58 crc kubenswrapper[4921]: I1210 13:16:58.523106 4921 scope.go:117] "RemoveContainer" containerID="9da02d3a46a6b343cb8dde058ca6aa75e481651107cab320760c66e78447d909" Dec 10 13:16:58 crc kubenswrapper[4921]: I1210 13:16:58.523211 4921 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 10 13:16:58 crc kubenswrapper[4921]: I1210 13:16:58.581509 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 10 13:16:58 crc kubenswrapper[4921]: I1210 13:16:58.596807 4921 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Dec 10 13:16:58 crc kubenswrapper[4921]: I1210 13:16:58.607224 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 10 13:16:58 crc kubenswrapper[4921]: E1210 13:16:58.607642 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d48014d-698e-458a-98fe-40dc5e6fa3ab" containerName="nova-scheduler-scheduler" Dec 10 13:16:58 crc kubenswrapper[4921]: I1210 13:16:58.607659 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d48014d-698e-458a-98fe-40dc5e6fa3ab" containerName="nova-scheduler-scheduler" Dec 10 13:16:58 crc kubenswrapper[4921]: I1210 13:16:58.607822 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="0d48014d-698e-458a-98fe-40dc5e6fa3ab" containerName="nova-scheduler-scheduler" Dec 10 13:16:58 crc kubenswrapper[4921]: I1210 13:16:58.608429 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 10 13:16:58 crc kubenswrapper[4921]: I1210 13:16:58.612295 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 10 13:16:58 crc kubenswrapper[4921]: I1210 13:16:58.617188 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 10 13:16:58 crc kubenswrapper[4921]: I1210 13:16:58.734936 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sk64b\" (UniqueName: \"kubernetes.io/projected/eb69726b-f75d-4d33-8c26-355f8a6dc2ce-kube-api-access-sk64b\") pod \"nova-scheduler-0\" (UID: \"eb69726b-f75d-4d33-8c26-355f8a6dc2ce\") " pod="openstack/nova-scheduler-0" Dec 10 13:16:58 crc kubenswrapper[4921]: I1210 13:16:58.734990 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eb69726b-f75d-4d33-8c26-355f8a6dc2ce-config-data\") pod \"nova-scheduler-0\" (UID: \"eb69726b-f75d-4d33-8c26-355f8a6dc2ce\") " pod="openstack/nova-scheduler-0" Dec 10 13:16:58 crc kubenswrapper[4921]: I1210 13:16:58.735221 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb69726b-f75d-4d33-8c26-355f8a6dc2ce-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"eb69726b-f75d-4d33-8c26-355f8a6dc2ce\") " pod="openstack/nova-scheduler-0" Dec 10 13:16:58 crc kubenswrapper[4921]: I1210 13:16:58.837404 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb69726b-f75d-4d33-8c26-355f8a6dc2ce-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"eb69726b-f75d-4d33-8c26-355f8a6dc2ce\") " pod="openstack/nova-scheduler-0" Dec 10 13:16:58 crc kubenswrapper[4921]: I1210 13:16:58.837750 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sk64b\" (UniqueName: \"kubernetes.io/projected/eb69726b-f75d-4d33-8c26-355f8a6dc2ce-kube-api-access-sk64b\") pod \"nova-scheduler-0\" (UID: \"eb69726b-f75d-4d33-8c26-355f8a6dc2ce\") " pod="openstack/nova-scheduler-0" Dec 10 
13:16:58 crc kubenswrapper[4921]: I1210 13:16:58.837851 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eb69726b-f75d-4d33-8c26-355f8a6dc2ce-config-data\") pod \"nova-scheduler-0\" (UID: \"eb69726b-f75d-4d33-8c26-355f8a6dc2ce\") " pod="openstack/nova-scheduler-0" Dec 10 13:16:58 crc kubenswrapper[4921]: I1210 13:16:58.843246 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eb69726b-f75d-4d33-8c26-355f8a6dc2ce-config-data\") pod \"nova-scheduler-0\" (UID: \"eb69726b-f75d-4d33-8c26-355f8a6dc2ce\") " pod="openstack/nova-scheduler-0" Dec 10 13:16:58 crc kubenswrapper[4921]: I1210 13:16:58.854148 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sk64b\" (UniqueName: \"kubernetes.io/projected/eb69726b-f75d-4d33-8c26-355f8a6dc2ce-kube-api-access-sk64b\") pod \"nova-scheduler-0\" (UID: \"eb69726b-f75d-4d33-8c26-355f8a6dc2ce\") " pod="openstack/nova-scheduler-0" Dec 10 13:16:58 crc kubenswrapper[4921]: I1210 13:16:58.854896 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb69726b-f75d-4d33-8c26-355f8a6dc2ce-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"eb69726b-f75d-4d33-8c26-355f8a6dc2ce\") " pod="openstack/nova-scheduler-0" Dec 10 13:16:58 crc kubenswrapper[4921]: I1210 13:16:58.899422 4921 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="132528a7-80b2-4c2b-89aa-9ab13bd8741a" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.176:8775/\": read tcp 10.217.0.2:59530->10.217.0.176:8775: read: connection reset by peer" Dec 10 13:16:58 crc kubenswrapper[4921]: I1210 13:16:58.899645 4921 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="132528a7-80b2-4c2b-89aa-9ab13bd8741a" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.176:8775/\": read tcp 10.217.0.2:59544->10.217.0.176:8775: read: connection reset by peer" Dec 10 13:16:58 crc kubenswrapper[4921]: I1210 13:16:58.929121 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 10 13:16:59 crc kubenswrapper[4921]: I1210 13:16:59.206283 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0d48014d-698e-458a-98fe-40dc5e6fa3ab" path="/var/lib/kubelet/pods/0d48014d-698e-458a-98fe-40dc5e6fa3ab/volumes" Dec 10 13:16:59 crc kubenswrapper[4921]: I1210 13:16:59.208537 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="abcdde0d-505c-4cc7-9805-09b2fb3e562c" path="/var/lib/kubelet/pods/abcdde0d-505c-4cc7-9805-09b2fb3e562c/volumes" Dec 10 13:16:59 crc kubenswrapper[4921]: I1210 13:16:59.383301 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 10 13:16:59 crc kubenswrapper[4921]: W1210 13:16:59.383935 4921 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podeb69726b_f75d_4d33_8c26_355f8a6dc2ce.slice/crio-3743585c81951c289be25721c141fb85c15c018034225628b7a8da2909921f29 WatchSource:0}: Error finding container 3743585c81951c289be25721c141fb85c15c018034225628b7a8da2909921f29: Status 404 returned error can't find the container with id 3743585c81951c289be25721c141fb85c15c018034225628b7a8da2909921f29 Dec 10 13:16:59 crc kubenswrapper[4921]: I1210 13:16:59.533489 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"cf045e82-19ac-4799-a0aa-6ca5cb6cee04","Type":"ContainerStarted","Data":"8af422583cbf0dc3ef31970839d099688007aecaeb3fd7f4386f7abc26023d72"} Dec 10 13:16:59 crc kubenswrapper[4921]: I1210 13:16:59.533561 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"cf045e82-19ac-4799-a0aa-6ca5cb6cee04","Type":"ContainerStarted","Data":"e3f83901cd3b938093fac4f595abe75c88fecb25698ee220ee1c34d38173df3d"} Dec 10 13:16:59 crc kubenswrapper[4921]: I1210 13:16:59.552784 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"eb69726b-f75d-4d33-8c26-355f8a6dc2ce","Type":"ContainerStarted","Data":"3743585c81951c289be25721c141fb85c15c018034225628b7a8da2909921f29"} Dec 10 13:16:59 crc kubenswrapper[4921]: I1210 13:16:59.564849 4921 generic.go:334] "Generic (PLEG): container finished" podID="132528a7-80b2-4c2b-89aa-9ab13bd8741a" containerID="4977baddfd7bfc59078ee2e3e81fcedf383c27532d01418ad6dbf4d447a975ad" exitCode=0 Dec 10 13:16:59 crc kubenswrapper[4921]: I1210 13:16:59.564905 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"132528a7-80b2-4c2b-89aa-9ab13bd8741a","Type":"ContainerDied","Data":"4977baddfd7bfc59078ee2e3e81fcedf383c27532d01418ad6dbf4d447a975ad"} Dec 10 13:16:59 crc kubenswrapper[4921]: I1210 13:16:59.571195 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.571174799 podStartE2EDuration="2.571174799s" podCreationTimestamp="2025-12-10 13:16:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 13:16:59.564033707 +0000 UTC m=+1216.780255631" watchObservedRunningTime="2025-12-10 13:16:59.571174799 +0000 UTC m=+1216.787396723" Dec 10 13:16:59 crc kubenswrapper[4921]: I1210 13:16:59.898183 4921 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 10 13:17:00 crc kubenswrapper[4921]: I1210 13:17:00.065140 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/132528a7-80b2-4c2b-89aa-9ab13bd8741a-combined-ca-bundle\") pod \"132528a7-80b2-4c2b-89aa-9ab13bd8741a\" (UID: \"132528a7-80b2-4c2b-89aa-9ab13bd8741a\") " Dec 10 13:17:00 crc kubenswrapper[4921]: I1210 13:17:00.065219 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/132528a7-80b2-4c2b-89aa-9ab13bd8741a-logs\") pod \"132528a7-80b2-4c2b-89aa-9ab13bd8741a\" (UID: \"132528a7-80b2-4c2b-89aa-9ab13bd8741a\") " Dec 10 13:17:00 crc kubenswrapper[4921]: I1210 13:17:00.065259 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8ckfn\" (UniqueName: \"kubernetes.io/projected/132528a7-80b2-4c2b-89aa-9ab13bd8741a-kube-api-access-8ckfn\") pod \"132528a7-80b2-4c2b-89aa-9ab13bd8741a\" (UID: \"132528a7-80b2-4c2b-89aa-9ab13bd8741a\") " Dec 10 13:17:00 crc kubenswrapper[4921]: I1210 13:17:00.065320 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/132528a7-80b2-4c2b-89aa-9ab13bd8741a-nova-metadata-tls-certs\") pod \"132528a7-80b2-4c2b-89aa-9ab13bd8741a\" (UID: \"132528a7-80b2-4c2b-89aa-9ab13bd8741a\") " Dec 10 13:17:00 crc kubenswrapper[4921]: I1210 13:17:00.065414 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/132528a7-80b2-4c2b-89aa-9ab13bd8741a-config-data\") pod \"132528a7-80b2-4c2b-89aa-9ab13bd8741a\" (UID: \"132528a7-80b2-4c2b-89aa-9ab13bd8741a\") " Dec 10 13:17:00 crc kubenswrapper[4921]: I1210 13:17:00.066984 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/132528a7-80b2-4c2b-89aa-9ab13bd8741a-logs" (OuterVolumeSpecName: "logs") pod "132528a7-80b2-4c2b-89aa-9ab13bd8741a" (UID: "132528a7-80b2-4c2b-89aa-9ab13bd8741a"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 13:17:00 crc kubenswrapper[4921]: I1210 13:17:00.096950 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/132528a7-80b2-4c2b-89aa-9ab13bd8741a-kube-api-access-8ckfn" (OuterVolumeSpecName: "kube-api-access-8ckfn") pod "132528a7-80b2-4c2b-89aa-9ab13bd8741a" (UID: "132528a7-80b2-4c2b-89aa-9ab13bd8741a"). InnerVolumeSpecName "kube-api-access-8ckfn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 13:17:00 crc kubenswrapper[4921]: I1210 13:17:00.104085 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/132528a7-80b2-4c2b-89aa-9ab13bd8741a-config-data" (OuterVolumeSpecName: "config-data") pod "132528a7-80b2-4c2b-89aa-9ab13bd8741a" (UID: "132528a7-80b2-4c2b-89aa-9ab13bd8741a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:17:00 crc kubenswrapper[4921]: I1210 13:17:00.123894 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/132528a7-80b2-4c2b-89aa-9ab13bd8741a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "132528a7-80b2-4c2b-89aa-9ab13bd8741a" (UID: "132528a7-80b2-4c2b-89aa-9ab13bd8741a"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:17:00 crc kubenswrapper[4921]: I1210 13:17:00.149806 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/132528a7-80b2-4c2b-89aa-9ab13bd8741a-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "132528a7-80b2-4c2b-89aa-9ab13bd8741a" (UID: "132528a7-80b2-4c2b-89aa-9ab13bd8741a"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:17:00 crc kubenswrapper[4921]: I1210 13:17:00.167262 4921 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/132528a7-80b2-4c2b-89aa-9ab13bd8741a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 13:17:00 crc kubenswrapper[4921]: I1210 13:17:00.167298 4921 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/132528a7-80b2-4c2b-89aa-9ab13bd8741a-logs\") on node \"crc\" DevicePath \"\"" Dec 10 13:17:00 crc kubenswrapper[4921]: I1210 13:17:00.167311 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8ckfn\" (UniqueName: \"kubernetes.io/projected/132528a7-80b2-4c2b-89aa-9ab13bd8741a-kube-api-access-8ckfn\") on node \"crc\" DevicePath \"\"" Dec 10 13:17:00 crc kubenswrapper[4921]: I1210 13:17:00.167325 4921 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/132528a7-80b2-4c2b-89aa-9ab13bd8741a-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 10 13:17:00 crc kubenswrapper[4921]: I1210 13:17:00.167336 4921 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/132528a7-80b2-4c2b-89aa-9ab13bd8741a-config-data\") on node \"crc\" DevicePath \"\"" Dec 10 13:17:00 crc kubenswrapper[4921]: I1210 13:17:00.583822 4921 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 10 13:17:00 crc kubenswrapper[4921]: I1210 13:17:00.584363 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"132528a7-80b2-4c2b-89aa-9ab13bd8741a","Type":"ContainerDied","Data":"810beed87634aa09a7d236f7dc5838f3ac91c49c895aee7df2c85666effbee8b"} Dec 10 13:17:00 crc kubenswrapper[4921]: I1210 13:17:00.584536 4921 scope.go:117] "RemoveContainer" containerID="4977baddfd7bfc59078ee2e3e81fcedf383c27532d01418ad6dbf4d447a975ad" Dec 10 13:17:00 crc kubenswrapper[4921]: I1210 13:17:00.590899 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"eb69726b-f75d-4d33-8c26-355f8a6dc2ce","Type":"ContainerStarted","Data":"e7fa72adc7078808be65a4dfc8db3d379d5f907a5a7dcf2fffe617d78076eeef"} Dec 10 13:17:00 crc kubenswrapper[4921]: I1210 13:17:00.612301 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.61228456 podStartE2EDuration="2.61228456s" podCreationTimestamp="2025-12-10 13:16:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 13:17:00.609274679 +0000 UTC m=+1217.825496593" watchObservedRunningTime="2025-12-10 13:17:00.61228456 +0000 UTC m=+1217.828506484" Dec 10 13:17:00 crc kubenswrapper[4921]: I1210 13:17:00.617088 4921 scope.go:117] "RemoveContainer" containerID="fd0a6c2e3f05482d03664d7bcd66273ee0229b50f8d78e43e87c6d5fd306fcff" Dec 10 13:17:00 crc kubenswrapper[4921]: I1210 13:17:00.632669 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 10 13:17:00 crc kubenswrapper[4921]: I1210 13:17:00.641763 4921 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 10 13:17:00 crc kubenswrapper[4921]: I1210 13:17:00.661493 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 10 13:17:00 crc kubenswrapper[4921]: E1210 13:17:00.662177 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="132528a7-80b2-4c2b-89aa-9ab13bd8741a" containerName="nova-metadata-metadata" Dec 10 13:17:00 crc kubenswrapper[4921]: I1210 13:17:00.662262 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="132528a7-80b2-4c2b-89aa-9ab13bd8741a" containerName="nova-metadata-metadata" Dec 10 13:17:00 crc kubenswrapper[4921]: E1210 13:17:00.662365 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="132528a7-80b2-4c2b-89aa-9ab13bd8741a" containerName="nova-metadata-log" Dec 10 13:17:00 crc kubenswrapper[4921]: I1210 13:17:00.662455 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="132528a7-80b2-4c2b-89aa-9ab13bd8741a" containerName="nova-metadata-log" Dec 10 13:17:00 crc kubenswrapper[4921]: I1210 13:17:00.662747 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="132528a7-80b2-4c2b-89aa-9ab13bd8741a" containerName="nova-metadata-metadata" Dec 10 13:17:00 crc kubenswrapper[4921]: I1210 13:17:00.662829 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="132528a7-80b2-4c2b-89aa-9ab13bd8741a" containerName="nova-metadata-log" Dec 10 13:17:00 crc kubenswrapper[4921]: I1210 13:17:00.664039 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 10 13:17:00 crc kubenswrapper[4921]: I1210 13:17:00.672084 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 10 13:17:00 crc kubenswrapper[4921]: I1210 13:17:00.672285 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Dec 10 13:17:00 crc kubenswrapper[4921]: I1210 13:17:00.681899 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 10 13:17:00 crc kubenswrapper[4921]: I1210 13:17:00.782912 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cb8rw\" (UniqueName: \"kubernetes.io/projected/5baffea2-ac7e-4b10-adfa-1e418bbb1d05-kube-api-access-cb8rw\") pod \"nova-metadata-0\" (UID: \"5baffea2-ac7e-4b10-adfa-1e418bbb1d05\") " pod="openstack/nova-metadata-0" Dec 10 13:17:00 crc kubenswrapper[4921]: I1210 13:17:00.783207 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5baffea2-ac7e-4b10-adfa-1e418bbb1d05-logs\") pod \"nova-metadata-0\" (UID: \"5baffea2-ac7e-4b10-adfa-1e418bbb1d05\") " pod="openstack/nova-metadata-0" Dec 10 13:17:00 crc kubenswrapper[4921]: I1210 13:17:00.783317 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5baffea2-ac7e-4b10-adfa-1e418bbb1d05-config-data\") pod \"nova-metadata-0\" (UID: \"5baffea2-ac7e-4b10-adfa-1e418bbb1d05\") " pod="openstack/nova-metadata-0" Dec 10 13:17:00 crc kubenswrapper[4921]: I1210 13:17:00.783430 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/5baffea2-ac7e-4b10-adfa-1e418bbb1d05-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"5baffea2-ac7e-4b10-adfa-1e418bbb1d05\") " pod="openstack/nova-metadata-0" Dec 10 13:17:00 crc kubenswrapper[4921]: I1210 13:17:00.783665 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5baffea2-ac7e-4b10-adfa-1e418bbb1d05-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"5baffea2-ac7e-4b10-adfa-1e418bbb1d05\") " pod="openstack/nova-metadata-0" Dec 10 13:17:00 crc kubenswrapper[4921]: I1210 13:17:00.885541 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/5baffea2-ac7e-4b10-adfa-1e418bbb1d05-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"5baffea2-ac7e-4b10-adfa-1e418bbb1d05\") " pod="openstack/nova-metadata-0" Dec 10 13:17:00 crc kubenswrapper[4921]: I1210 13:17:00.885602 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5baffea2-ac7e-4b10-adfa-1e418bbb1d05-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"5baffea2-ac7e-4b10-adfa-1e418bbb1d05\") " pod="openstack/nova-metadata-0" Dec 10 13:17:00 crc kubenswrapper[4921]: I1210 13:17:00.885660 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cb8rw\" (UniqueName: \"kubernetes.io/projected/5baffea2-ac7e-4b10-adfa-1e418bbb1d05-kube-api-access-cb8rw\") pod \"nova-metadata-0\" (UID: 
\"5baffea2-ac7e-4b10-adfa-1e418bbb1d05\") " pod="openstack/nova-metadata-0" Dec 10 13:17:00 crc kubenswrapper[4921]: I1210 13:17:00.885702 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5baffea2-ac7e-4b10-adfa-1e418bbb1d05-logs\") pod \"nova-metadata-0\" (UID: \"5baffea2-ac7e-4b10-adfa-1e418bbb1d05\") " pod="openstack/nova-metadata-0" Dec 10 13:17:00 crc kubenswrapper[4921]: I1210 13:17:00.885746 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5baffea2-ac7e-4b10-adfa-1e418bbb1d05-config-data\") pod \"nova-metadata-0\" (UID: \"5baffea2-ac7e-4b10-adfa-1e418bbb1d05\") " pod="openstack/nova-metadata-0" Dec 10 13:17:00 crc kubenswrapper[4921]: I1210 13:17:00.886175 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5baffea2-ac7e-4b10-adfa-1e418bbb1d05-logs\") pod \"nova-metadata-0\" (UID: \"5baffea2-ac7e-4b10-adfa-1e418bbb1d05\") " pod="openstack/nova-metadata-0" Dec 10 13:17:00 crc kubenswrapper[4921]: I1210 13:17:00.913212 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/5baffea2-ac7e-4b10-adfa-1e418bbb1d05-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"5baffea2-ac7e-4b10-adfa-1e418bbb1d05\") " pod="openstack/nova-metadata-0" Dec 10 13:17:00 crc kubenswrapper[4921]: I1210 13:17:00.919923 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5baffea2-ac7e-4b10-adfa-1e418bbb1d05-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"5baffea2-ac7e-4b10-adfa-1e418bbb1d05\") " pod="openstack/nova-metadata-0" Dec 10 13:17:00 crc kubenswrapper[4921]: I1210 13:17:00.919932 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5baffea2-ac7e-4b10-adfa-1e418bbb1d05-config-data\") pod \"nova-metadata-0\" (UID: \"5baffea2-ac7e-4b10-adfa-1e418bbb1d05\") " pod="openstack/nova-metadata-0" Dec 10 13:17:00 crc kubenswrapper[4921]: I1210 13:17:00.934061 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cb8rw\" (UniqueName: \"kubernetes.io/projected/5baffea2-ac7e-4b10-adfa-1e418bbb1d05-kube-api-access-cb8rw\") pod \"nova-metadata-0\" (UID: \"5baffea2-ac7e-4b10-adfa-1e418bbb1d05\") " pod="openstack/nova-metadata-0" Dec 10 13:17:00 crc kubenswrapper[4921]: I1210 13:17:00.993442 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 10 13:17:01 crc kubenswrapper[4921]: I1210 13:17:01.208459 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="132528a7-80b2-4c2b-89aa-9ab13bd8741a" path="/var/lib/kubelet/pods/132528a7-80b2-4c2b-89aa-9ab13bd8741a/volumes" Dec 10 13:17:01 crc kubenswrapper[4921]: I1210 13:17:01.442483 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 10 13:17:01 crc kubenswrapper[4921]: W1210 13:17:01.455323 4921 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5baffea2_ac7e_4b10_adfa_1e418bbb1d05.slice/crio-b4e5d367d708ce13c4043b0d7512bbeaba0cde63a4ada2abad7b9e083c56b0e0 WatchSource:0}: Error finding container b4e5d367d708ce13c4043b0d7512bbeaba0cde63a4ada2abad7b9e083c56b0e0: Status 404 returned error can't find the container with id b4e5d367d708ce13c4043b0d7512bbeaba0cde63a4ada2abad7b9e083c56b0e0 Dec 10 13:17:01 crc kubenswrapper[4921]: I1210 13:17:01.619699 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"5baffea2-ac7e-4b10-adfa-1e418bbb1d05","Type":"ContainerStarted","Data":"b4e5d367d708ce13c4043b0d7512bbeaba0cde63a4ada2abad7b9e083c56b0e0"} Dec 10 13:17:02 crc kubenswrapper[4921]: I1210 13:17:02.630178 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"5baffea2-ac7e-4b10-adfa-1e418bbb1d05","Type":"ContainerStarted","Data":"bab40b3c7a8c4d8aa4d299d8adc578fe3f731aa48bbc6575ac11ac1ae04af274"} Dec 10 13:17:02 crc kubenswrapper[4921]: I1210 13:17:02.630559 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"5baffea2-ac7e-4b10-adfa-1e418bbb1d05","Type":"ContainerStarted","Data":"7f32838e11ec3ac6381938d546533fd6f39d93cd77980f84248aa363f44b44ea"} Dec 10 13:17:02 crc kubenswrapper[4921]: I1210 13:17:02.670621 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.6706027 podStartE2EDuration="2.6706027s" podCreationTimestamp="2025-12-10 13:17:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 13:17:02.651288262 +0000 UTC m=+1219.867510206" watchObservedRunningTime="2025-12-10 13:17:02.6706027 +0000 UTC m=+1219.886824634" Dec 10 13:17:03 crc kubenswrapper[4921]: I1210 13:17:03.929378 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 10 13:17:05 crc kubenswrapper[4921]: I1210 13:17:05.993502 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 10 13:17:05 crc kubenswrapper[4921]: I1210 13:17:05.993873 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 10 13:17:07 crc kubenswrapper[4921]: I1210 13:17:07.928178 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 10 13:17:07 crc kubenswrapper[4921]: I1210 13:17:07.928545 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 10 13:17:08 crc kubenswrapper[4921]: I1210 13:17:08.930368 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Dec 10 13:17:08 crc kubenswrapper[4921]: I1210 13:17:08.940514 4921 prober.go:107] "Probe failed" 
probeType="Startup" pod="openstack/nova-api-0" podUID="cf045e82-19ac-4799-a0aa-6ca5cb6cee04" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.187:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 10 13:17:08 crc kubenswrapper[4921]: I1210 13:17:08.940769 4921 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="cf045e82-19ac-4799-a0aa-6ca5cb6cee04" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.187:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 10 13:17:08 crc kubenswrapper[4921]: I1210 13:17:08.961577 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Dec 10 13:17:09 crc kubenswrapper[4921]: I1210 13:17:09.716811 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Dec 10 13:17:10 crc kubenswrapper[4921]: I1210 13:17:10.993933 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 10 13:17:10 crc kubenswrapper[4921]: I1210 13:17:10.994262 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 10 13:17:12 crc kubenswrapper[4921]: I1210 13:17:12.005558 4921 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="5baffea2-ac7e-4b10-adfa-1e418bbb1d05" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.189:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 10 13:17:12 crc kubenswrapper[4921]: I1210 13:17:12.005579 4921 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="5baffea2-ac7e-4b10-adfa-1e418bbb1d05" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.189:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 10 13:17:15 crc kubenswrapper[4921]: I1210 13:17:15.819137 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Dec 10 13:17:17 crc kubenswrapper[4921]: I1210 13:17:17.934246 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 10 13:17:17 crc kubenswrapper[4921]: I1210 13:17:17.934800 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 10 13:17:17 crc kubenswrapper[4921]: I1210 13:17:17.935180 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 10 13:17:17 crc kubenswrapper[4921]: I1210 13:17:17.939516 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 10 13:17:18 crc kubenswrapper[4921]: I1210 13:17:18.761599 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 10 13:17:18 crc kubenswrapper[4921]: I1210 13:17:18.771883 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 10 13:17:21 crc kubenswrapper[4921]: I1210 13:17:21.001126 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 10 13:17:21 crc kubenswrapper[4921]: I1210 13:17:21.001637 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 10 13:17:21 
crc kubenswrapper[4921]: I1210 13:17:21.007300 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 10 13:17:21 crc kubenswrapper[4921]: I1210 13:17:21.010939 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 10 13:17:29 crc kubenswrapper[4921]: I1210 13:17:29.345102 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 10 13:17:30 crc kubenswrapper[4921]: I1210 13:17:30.227091 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 10 13:17:33 crc kubenswrapper[4921]: I1210 13:17:33.495708 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="c651083f-4dd3-4963-892f-ddbc5ef1af05" containerName="rabbitmq" containerID="cri-o://b183c5c69fa2e6cff7dd5a97eb591e8d23a80510f008b8368fa1c71b8a19f60e" gracePeriod=604796 Dec 10 13:17:34 crc kubenswrapper[4921]: I1210 13:17:34.200015 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="e098cd5a-992f-42a0-a89e-d8dd59dbbcc5" containerName="rabbitmq" containerID="cri-o://399fc68b6786c375a7252b97a1d173f107a4d96b48bfd8d89ccc69be61078e50" gracePeriod=604797 Dec 10 13:17:40 crc kubenswrapper[4921]: I1210 13:17:40.458767 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 10 13:17:40 crc kubenswrapper[4921]: I1210 13:17:40.624422 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c651083f-4dd3-4963-892f-ddbc5ef1af05-pod-info\") pod \"c651083f-4dd3-4963-892f-ddbc5ef1af05\" (UID: \"c651083f-4dd3-4963-892f-ddbc5ef1af05\") " Dec 10 13:17:40 crc kubenswrapper[4921]: I1210 13:17:40.624462 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c651083f-4dd3-4963-892f-ddbc5ef1af05-plugins-conf\") pod \"c651083f-4dd3-4963-892f-ddbc5ef1af05\" (UID: \"c651083f-4dd3-4963-892f-ddbc5ef1af05\") " Dec 10 13:17:40 crc kubenswrapper[4921]: I1210 13:17:40.624481 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jvw6t\" (UniqueName: \"kubernetes.io/projected/c651083f-4dd3-4963-892f-ddbc5ef1af05-kube-api-access-jvw6t\") pod \"c651083f-4dd3-4963-892f-ddbc5ef1af05\" (UID: \"c651083f-4dd3-4963-892f-ddbc5ef1af05\") " Dec 10 13:17:40 crc kubenswrapper[4921]: I1210 13:17:40.624507 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c651083f-4dd3-4963-892f-ddbc5ef1af05-server-conf\") pod \"c651083f-4dd3-4963-892f-ddbc5ef1af05\" (UID: \"c651083f-4dd3-4963-892f-ddbc5ef1af05\") " Dec 10 13:17:40 crc kubenswrapper[4921]: I1210 13:17:40.625585 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c651083f-4dd3-4963-892f-ddbc5ef1af05-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "c651083f-4dd3-4963-892f-ddbc5ef1af05" (UID: "c651083f-4dd3-4963-892f-ddbc5ef1af05"). InnerVolumeSpecName "plugins-conf". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 13:17:40 crc kubenswrapper[4921]: I1210 13:17:40.625717 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"c651083f-4dd3-4963-892f-ddbc5ef1af05\" (UID: \"c651083f-4dd3-4963-892f-ddbc5ef1af05\") " Dec 10 13:17:40 crc kubenswrapper[4921]: I1210 13:17:40.625829 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c651083f-4dd3-4963-892f-ddbc5ef1af05-rabbitmq-plugins\") pod \"c651083f-4dd3-4963-892f-ddbc5ef1af05\" (UID: \"c651083f-4dd3-4963-892f-ddbc5ef1af05\") " Dec 10 13:17:40 crc kubenswrapper[4921]: I1210 13:17:40.625876 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/c651083f-4dd3-4963-892f-ddbc5ef1af05-rabbitmq-tls\") pod \"c651083f-4dd3-4963-892f-ddbc5ef1af05\" (UID: \"c651083f-4dd3-4963-892f-ddbc5ef1af05\") " Dec 10 13:17:40 crc kubenswrapper[4921]: I1210 13:17:40.625901 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c651083f-4dd3-4963-892f-ddbc5ef1af05-rabbitmq-erlang-cookie\") pod \"c651083f-4dd3-4963-892f-ddbc5ef1af05\" (UID: \"c651083f-4dd3-4963-892f-ddbc5ef1af05\") " Dec 10 13:17:40 crc kubenswrapper[4921]: I1210 13:17:40.625961 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c651083f-4dd3-4963-892f-ddbc5ef1af05-config-data\") pod \"c651083f-4dd3-4963-892f-ddbc5ef1af05\" (UID: \"c651083f-4dd3-4963-892f-ddbc5ef1af05\") " Dec 10 13:17:40 crc kubenswrapper[4921]: I1210 13:17:40.625980 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c651083f-4dd3-4963-892f-ddbc5ef1af05-rabbitmq-confd\") pod \"c651083f-4dd3-4963-892f-ddbc5ef1af05\" (UID: \"c651083f-4dd3-4963-892f-ddbc5ef1af05\") " Dec 10 13:17:40 crc kubenswrapper[4921]: I1210 13:17:40.625998 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c651083f-4dd3-4963-892f-ddbc5ef1af05-erlang-cookie-secret\") pod \"c651083f-4dd3-4963-892f-ddbc5ef1af05\" (UID: \"c651083f-4dd3-4963-892f-ddbc5ef1af05\") " Dec 10 13:17:40 crc kubenswrapper[4921]: I1210 13:17:40.626646 4921 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c651083f-4dd3-4963-892f-ddbc5ef1af05-plugins-conf\") on node \"crc\" DevicePath \"\"" Dec 10 13:17:40 crc kubenswrapper[4921]: I1210 13:17:40.628639 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c651083f-4dd3-4963-892f-ddbc5ef1af05-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "c651083f-4dd3-4963-892f-ddbc5ef1af05" (UID: "c651083f-4dd3-4963-892f-ddbc5ef1af05"). InnerVolumeSpecName "rabbitmq-erlang-cookie". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 13:17:40 crc kubenswrapper[4921]: I1210 13:17:40.629159 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c651083f-4dd3-4963-892f-ddbc5ef1af05-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "c651083f-4dd3-4963-892f-ddbc5ef1af05" (UID: "c651083f-4dd3-4963-892f-ddbc5ef1af05"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 13:17:40 crc kubenswrapper[4921]: I1210 13:17:40.678734 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/c651083f-4dd3-4963-892f-ddbc5ef1af05-pod-info" (OuterVolumeSpecName: "pod-info") pod "c651083f-4dd3-4963-892f-ddbc5ef1af05" (UID: "c651083f-4dd3-4963-892f-ddbc5ef1af05"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Dec 10 13:17:40 crc kubenswrapper[4921]: I1210 13:17:40.681748 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage04-crc" (OuterVolumeSpecName: "persistence") pod "c651083f-4dd3-4963-892f-ddbc5ef1af05" (UID: "c651083f-4dd3-4963-892f-ddbc5ef1af05"). InnerVolumeSpecName "local-storage04-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 10 13:17:40 crc kubenswrapper[4921]: I1210 13:17:40.683384 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c651083f-4dd3-4963-892f-ddbc5ef1af05-config-data" (OuterVolumeSpecName: "config-data") pod "c651083f-4dd3-4963-892f-ddbc5ef1af05" (UID: "c651083f-4dd3-4963-892f-ddbc5ef1af05"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 13:17:40 crc kubenswrapper[4921]: I1210 13:17:40.683622 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c651083f-4dd3-4963-892f-ddbc5ef1af05-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "c651083f-4dd3-4963-892f-ddbc5ef1af05" (UID: "c651083f-4dd3-4963-892f-ddbc5ef1af05"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:17:40 crc kubenswrapper[4921]: I1210 13:17:40.684357 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c651083f-4dd3-4963-892f-ddbc5ef1af05-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "c651083f-4dd3-4963-892f-ddbc5ef1af05" (UID: "c651083f-4dd3-4963-892f-ddbc5ef1af05"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 13:17:40 crc kubenswrapper[4921]: I1210 13:17:40.688188 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c651083f-4dd3-4963-892f-ddbc5ef1af05-kube-api-access-jvw6t" (OuterVolumeSpecName: "kube-api-access-jvw6t") pod "c651083f-4dd3-4963-892f-ddbc5ef1af05" (UID: "c651083f-4dd3-4963-892f-ddbc5ef1af05"). InnerVolumeSpecName "kube-api-access-jvw6t". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 13:17:40 crc kubenswrapper[4921]: I1210 13:17:40.734922 4921 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c651083f-4dd3-4963-892f-ddbc5ef1af05-config-data\") on node \"crc\" DevicePath \"\"" Dec 10 13:17:40 crc kubenswrapper[4921]: I1210 13:17:40.734950 4921 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c651083f-4dd3-4963-892f-ddbc5ef1af05-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Dec 10 13:17:40 crc kubenswrapper[4921]: I1210 13:17:40.734960 4921 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c651083f-4dd3-4963-892f-ddbc5ef1af05-pod-info\") on node \"crc\" DevicePath \"\"" Dec 10 13:17:40 crc kubenswrapper[4921]: I1210 13:17:40.734969 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jvw6t\" (UniqueName: \"kubernetes.io/projected/c651083f-4dd3-4963-892f-ddbc5ef1af05-kube-api-access-jvw6t\") on node \"crc\" DevicePath \"\"" Dec 10 13:17:40 crc kubenswrapper[4921]: I1210 13:17:40.734999 4921 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" " Dec 10 13:17:40 crc kubenswrapper[4921]: I1210 13:17:40.735008 4921 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c651083f-4dd3-4963-892f-ddbc5ef1af05-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Dec 10 13:17:40 crc kubenswrapper[4921]: I1210 13:17:40.735016 4921 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/c651083f-4dd3-4963-892f-ddbc5ef1af05-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Dec 10 13:17:40 crc kubenswrapper[4921]: I1210 13:17:40.735024 4921 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c651083f-4dd3-4963-892f-ddbc5ef1af05-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Dec 10 13:17:40 crc kubenswrapper[4921]: I1210 13:17:40.753879 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c651083f-4dd3-4963-892f-ddbc5ef1af05-server-conf" (OuterVolumeSpecName: "server-conf") pod "c651083f-4dd3-4963-892f-ddbc5ef1af05" (UID: "c651083f-4dd3-4963-892f-ddbc5ef1af05"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 13:17:40 crc kubenswrapper[4921]: I1210 13:17:40.777505 4921 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage04-crc" (UniqueName: "kubernetes.io/local-volume/local-storage04-crc") on node "crc" Dec 10 13:17:40 crc kubenswrapper[4921]: I1210 13:17:40.808502 4921 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 10 13:17:40 crc kubenswrapper[4921]: I1210 13:17:40.835793 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/e098cd5a-992f-42a0-a89e-d8dd59dbbcc5-plugins-conf\") pod \"e098cd5a-992f-42a0-a89e-d8dd59dbbcc5\" (UID: \"e098cd5a-992f-42a0-a89e-d8dd59dbbcc5\") " Dec 10 13:17:40 crc kubenswrapper[4921]: I1210 13:17:40.835867 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/e098cd5a-992f-42a0-a89e-d8dd59dbbcc5-server-conf\") pod \"e098cd5a-992f-42a0-a89e-d8dd59dbbcc5\" (UID: \"e098cd5a-992f-42a0-a89e-d8dd59dbbcc5\") " Dec 10 13:17:40 crc kubenswrapper[4921]: I1210 13:17:40.835891 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/e098cd5a-992f-42a0-a89e-d8dd59dbbcc5-rabbitmq-plugins\") pod \"e098cd5a-992f-42a0-a89e-d8dd59dbbcc5\" (UID: \"e098cd5a-992f-42a0-a89e-d8dd59dbbcc5\") " Dec 10 13:17:40 crc kubenswrapper[4921]: I1210 13:17:40.835920 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/e098cd5a-992f-42a0-a89e-d8dd59dbbcc5-rabbitmq-confd\") pod \"e098cd5a-992f-42a0-a89e-d8dd59dbbcc5\" (UID: \"e098cd5a-992f-42a0-a89e-d8dd59dbbcc5\") " Dec 10 13:17:40 crc kubenswrapper[4921]: I1210 13:17:40.835962 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e098cd5a-992f-42a0-a89e-d8dd59dbbcc5-config-data\") pod \"e098cd5a-992f-42a0-a89e-d8dd59dbbcc5\" (UID: \"e098cd5a-992f-42a0-a89e-d8dd59dbbcc5\") " Dec 10 13:17:40 crc kubenswrapper[4921]: I1210 13:17:40.836018 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/e098cd5a-992f-42a0-a89e-d8dd59dbbcc5-pod-info\") pod \"e098cd5a-992f-42a0-a89e-d8dd59dbbcc5\" (UID: \"e098cd5a-992f-42a0-a89e-d8dd59dbbcc5\") " Dec 10 13:17:40 crc kubenswrapper[4921]: I1210 13:17:40.836036 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"e098cd5a-992f-42a0-a89e-d8dd59dbbcc5\" (UID: \"e098cd5a-992f-42a0-a89e-d8dd59dbbcc5\") " Dec 10 13:17:40 crc kubenswrapper[4921]: I1210 13:17:40.836077 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/e098cd5a-992f-42a0-a89e-d8dd59dbbcc5-rabbitmq-tls\") pod \"e098cd5a-992f-42a0-a89e-d8dd59dbbcc5\" (UID: \"e098cd5a-992f-42a0-a89e-d8dd59dbbcc5\") " Dec 10 13:17:40 crc kubenswrapper[4921]: I1210 13:17:40.836122 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/e098cd5a-992f-42a0-a89e-d8dd59dbbcc5-erlang-cookie-secret\") pod \"e098cd5a-992f-42a0-a89e-d8dd59dbbcc5\" (UID: \"e098cd5a-992f-42a0-a89e-d8dd59dbbcc5\") " Dec 10 13:17:40 crc kubenswrapper[4921]: I1210 13:17:40.836150 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/e098cd5a-992f-42a0-a89e-d8dd59dbbcc5-rabbitmq-erlang-cookie\") pod \"e098cd5a-992f-42a0-a89e-d8dd59dbbcc5\" (UID: 
\"e098cd5a-992f-42a0-a89e-d8dd59dbbcc5\") " Dec 10 13:17:40 crc kubenswrapper[4921]: I1210 13:17:40.836172 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zk5nq\" (UniqueName: \"kubernetes.io/projected/e098cd5a-992f-42a0-a89e-d8dd59dbbcc5-kube-api-access-zk5nq\") pod \"e098cd5a-992f-42a0-a89e-d8dd59dbbcc5\" (UID: \"e098cd5a-992f-42a0-a89e-d8dd59dbbcc5\") " Dec 10 13:17:40 crc kubenswrapper[4921]: I1210 13:17:40.836188 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e098cd5a-992f-42a0-a89e-d8dd59dbbcc5-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "e098cd5a-992f-42a0-a89e-d8dd59dbbcc5" (UID: "e098cd5a-992f-42a0-a89e-d8dd59dbbcc5"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 13:17:40 crc kubenswrapper[4921]: I1210 13:17:40.836577 4921 reconciler_common.go:293] "Volume detached for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" DevicePath \"\"" Dec 10 13:17:40 crc kubenswrapper[4921]: I1210 13:17:40.836594 4921 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c651083f-4dd3-4963-892f-ddbc5ef1af05-server-conf\") on node \"crc\" DevicePath \"\"" Dec 10 13:17:40 crc kubenswrapper[4921]: I1210 13:17:40.836604 4921 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/e098cd5a-992f-42a0-a89e-d8dd59dbbcc5-plugins-conf\") on node \"crc\" DevicePath \"\"" Dec 10 13:17:40 crc kubenswrapper[4921]: I1210 13:17:40.837529 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e098cd5a-992f-42a0-a89e-d8dd59dbbcc5-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "e098cd5a-992f-42a0-a89e-d8dd59dbbcc5" (UID: "e098cd5a-992f-42a0-a89e-d8dd59dbbcc5"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 13:17:40 crc kubenswrapper[4921]: I1210 13:17:40.842929 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e098cd5a-992f-42a0-a89e-d8dd59dbbcc5-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "e098cd5a-992f-42a0-a89e-d8dd59dbbcc5" (UID: "e098cd5a-992f-42a0-a89e-d8dd59dbbcc5"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 13:17:40 crc kubenswrapper[4921]: I1210 13:17:40.843214 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/e098cd5a-992f-42a0-a89e-d8dd59dbbcc5-pod-info" (OuterVolumeSpecName: "pod-info") pod "e098cd5a-992f-42a0-a89e-d8dd59dbbcc5" (UID: "e098cd5a-992f-42a0-a89e-d8dd59dbbcc5"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Dec 10 13:17:40 crc kubenswrapper[4921]: I1210 13:17:40.846535 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e098cd5a-992f-42a0-a89e-d8dd59dbbcc5-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "e098cd5a-992f-42a0-a89e-d8dd59dbbcc5" (UID: "e098cd5a-992f-42a0-a89e-d8dd59dbbcc5"). InnerVolumeSpecName "rabbitmq-tls". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 13:17:40 crc kubenswrapper[4921]: I1210 13:17:40.846712 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c651083f-4dd3-4963-892f-ddbc5ef1af05-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "c651083f-4dd3-4963-892f-ddbc5ef1af05" (UID: "c651083f-4dd3-4963-892f-ddbc5ef1af05"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 13:17:40 crc kubenswrapper[4921]: I1210 13:17:40.849530 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e098cd5a-992f-42a0-a89e-d8dd59dbbcc5-kube-api-access-zk5nq" (OuterVolumeSpecName: "kube-api-access-zk5nq") pod "e098cd5a-992f-42a0-a89e-d8dd59dbbcc5" (UID: "e098cd5a-992f-42a0-a89e-d8dd59dbbcc5"). InnerVolumeSpecName "kube-api-access-zk5nq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 13:17:40 crc kubenswrapper[4921]: I1210 13:17:40.856596 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage02-crc" (OuterVolumeSpecName: "persistence") pod "e098cd5a-992f-42a0-a89e-d8dd59dbbcc5" (UID: "e098cd5a-992f-42a0-a89e-d8dd59dbbcc5"). InnerVolumeSpecName "local-storage02-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 10 13:17:40 crc kubenswrapper[4921]: I1210 13:17:40.859241 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e098cd5a-992f-42a0-a89e-d8dd59dbbcc5-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "e098cd5a-992f-42a0-a89e-d8dd59dbbcc5" (UID: "e098cd5a-992f-42a0-a89e-d8dd59dbbcc5"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:17:40 crc kubenswrapper[4921]: I1210 13:17:40.888913 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e098cd5a-992f-42a0-a89e-d8dd59dbbcc5-config-data" (OuterVolumeSpecName: "config-data") pod "e098cd5a-992f-42a0-a89e-d8dd59dbbcc5" (UID: "e098cd5a-992f-42a0-a89e-d8dd59dbbcc5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 13:17:40 crc kubenswrapper[4921]: I1210 13:17:40.916122 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e098cd5a-992f-42a0-a89e-d8dd59dbbcc5-server-conf" (OuterVolumeSpecName: "server-conf") pod "e098cd5a-992f-42a0-a89e-d8dd59dbbcc5" (UID: "e098cd5a-992f-42a0-a89e-d8dd59dbbcc5"). InnerVolumeSpecName "server-conf". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 13:17:40 crc kubenswrapper[4921]: I1210 13:17:40.938430 4921 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/e098cd5a-992f-42a0-a89e-d8dd59dbbcc5-server-conf\") on node \"crc\" DevicePath \"\"" Dec 10 13:17:40 crc kubenswrapper[4921]: I1210 13:17:40.938479 4921 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/e098cd5a-992f-42a0-a89e-d8dd59dbbcc5-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Dec 10 13:17:40 crc kubenswrapper[4921]: I1210 13:17:40.938490 4921 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e098cd5a-992f-42a0-a89e-d8dd59dbbcc5-config-data\") on node \"crc\" DevicePath \"\"" Dec 10 13:17:40 crc kubenswrapper[4921]: I1210 13:17:40.938498 4921 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/e098cd5a-992f-42a0-a89e-d8dd59dbbcc5-pod-info\") on node \"crc\" DevicePath \"\"" Dec 10 13:17:40 crc kubenswrapper[4921]: I1210 13:17:40.938545 4921 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" " Dec 10 13:17:40 crc kubenswrapper[4921]: I1210 13:17:40.938574 4921 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/e098cd5a-992f-42a0-a89e-d8dd59dbbcc5-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Dec 10 13:17:40 crc kubenswrapper[4921]: I1210 13:17:40.938583 4921 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c651083f-4dd3-4963-892f-ddbc5ef1af05-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Dec 10 13:17:40 crc kubenswrapper[4921]: I1210 13:17:40.938591 4921 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/e098cd5a-992f-42a0-a89e-d8dd59dbbcc5-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Dec 10 13:17:40 crc kubenswrapper[4921]: I1210 13:17:40.938600 4921 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/e098cd5a-992f-42a0-a89e-d8dd59dbbcc5-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Dec 10 13:17:40 crc kubenswrapper[4921]: I1210 13:17:40.938632 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zk5nq\" (UniqueName: \"kubernetes.io/projected/e098cd5a-992f-42a0-a89e-d8dd59dbbcc5-kube-api-access-zk5nq\") on node \"crc\" DevicePath \"\"" Dec 10 13:17:40 crc kubenswrapper[4921]: I1210 13:17:40.961167 4921 generic.go:334] "Generic (PLEG): container finished" podID="e098cd5a-992f-42a0-a89e-d8dd59dbbcc5" containerID="399fc68b6786c375a7252b97a1d173f107a4d96b48bfd8d89ccc69be61078e50" exitCode=0 Dec 10 13:17:40 crc kubenswrapper[4921]: I1210 13:17:40.961227 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"e098cd5a-992f-42a0-a89e-d8dd59dbbcc5","Type":"ContainerDied","Data":"399fc68b6786c375a7252b97a1d173f107a4d96b48bfd8d89ccc69be61078e50"} Dec 10 13:17:40 crc kubenswrapper[4921]: I1210 13:17:40.961256 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" 
event={"ID":"e098cd5a-992f-42a0-a89e-d8dd59dbbcc5","Type":"ContainerDied","Data":"b70740192fffb66ab4383666a58c9f78e25c8f205c631e3260069c5c2bee2735"} Dec 10 13:17:40 crc kubenswrapper[4921]: I1210 13:17:40.961275 4921 scope.go:117] "RemoveContainer" containerID="399fc68b6786c375a7252b97a1d173f107a4d96b48bfd8d89ccc69be61078e50" Dec 10 13:17:40 crc kubenswrapper[4921]: I1210 13:17:40.961424 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 10 13:17:40 crc kubenswrapper[4921]: I1210 13:17:40.961963 4921 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage02-crc" (UniqueName: "kubernetes.io/local-volume/local-storage02-crc") on node "crc" Dec 10 13:17:40 crc kubenswrapper[4921]: I1210 13:17:40.978663 4921 generic.go:334] "Generic (PLEG): container finished" podID="c651083f-4dd3-4963-892f-ddbc5ef1af05" containerID="b183c5c69fa2e6cff7dd5a97eb591e8d23a80510f008b8368fa1c71b8a19f60e" exitCode=0 Dec 10 13:17:40 crc kubenswrapper[4921]: I1210 13:17:40.978718 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"c651083f-4dd3-4963-892f-ddbc5ef1af05","Type":"ContainerDied","Data":"b183c5c69fa2e6cff7dd5a97eb591e8d23a80510f008b8368fa1c71b8a19f60e"} Dec 10 13:17:40 crc kubenswrapper[4921]: I1210 13:17:40.978751 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"c651083f-4dd3-4963-892f-ddbc5ef1af05","Type":"ContainerDied","Data":"b98b85ea672a94d33b52ead314c1b3c811ba91dc059d7cef791b66117c491386"} Dec 10 13:17:40 crc kubenswrapper[4921]: I1210 13:17:40.978818 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 10 13:17:40 crc kubenswrapper[4921]: I1210 13:17:40.981902 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e098cd5a-992f-42a0-a89e-d8dd59dbbcc5-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "e098cd5a-992f-42a0-a89e-d8dd59dbbcc5" (UID: "e098cd5a-992f-42a0-a89e-d8dd59dbbcc5"). InnerVolumeSpecName "rabbitmq-confd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 13:17:40 crc kubenswrapper[4921]: I1210 13:17:40.989113 4921 scope.go:117] "RemoveContainer" containerID="be2e3565a694652e9fc8296be0cbb14d87a944c9148a1b7742d5d0c31754e2a9" Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.015680 4921 scope.go:117] "RemoveContainer" containerID="399fc68b6786c375a7252b97a1d173f107a4d96b48bfd8d89ccc69be61078e50" Dec 10 13:17:41 crc kubenswrapper[4921]: E1210 13:17:41.018600 4921 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"399fc68b6786c375a7252b97a1d173f107a4d96b48bfd8d89ccc69be61078e50\": container with ID starting with 399fc68b6786c375a7252b97a1d173f107a4d96b48bfd8d89ccc69be61078e50 not found: ID does not exist" containerID="399fc68b6786c375a7252b97a1d173f107a4d96b48bfd8d89ccc69be61078e50" Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.018644 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"399fc68b6786c375a7252b97a1d173f107a4d96b48bfd8d89ccc69be61078e50"} err="failed to get container status \"399fc68b6786c375a7252b97a1d173f107a4d96b48bfd8d89ccc69be61078e50\": rpc error: code = NotFound desc = could not find container \"399fc68b6786c375a7252b97a1d173f107a4d96b48bfd8d89ccc69be61078e50\": container with ID starting with 399fc68b6786c375a7252b97a1d173f107a4d96b48bfd8d89ccc69be61078e50 not found: ID does not exist" Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.018669 4921 scope.go:117] "RemoveContainer" containerID="be2e3565a694652e9fc8296be0cbb14d87a944c9148a1b7742d5d0c31754e2a9" Dec 10 13:17:41 crc kubenswrapper[4921]: E1210 13:17:41.018907 4921 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"be2e3565a694652e9fc8296be0cbb14d87a944c9148a1b7742d5d0c31754e2a9\": container with ID starting with be2e3565a694652e9fc8296be0cbb14d87a944c9148a1b7742d5d0c31754e2a9 not found: ID does not exist" containerID="be2e3565a694652e9fc8296be0cbb14d87a944c9148a1b7742d5d0c31754e2a9" Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.018939 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"be2e3565a694652e9fc8296be0cbb14d87a944c9148a1b7742d5d0c31754e2a9"} err="failed to get container status \"be2e3565a694652e9fc8296be0cbb14d87a944c9148a1b7742d5d0c31754e2a9\": rpc error: code = NotFound desc = could not find container \"be2e3565a694652e9fc8296be0cbb14d87a944c9148a1b7742d5d0c31754e2a9\": container with ID starting with be2e3565a694652e9fc8296be0cbb14d87a944c9148a1b7742d5d0c31754e2a9 not found: ID does not exist" Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.018953 4921 scope.go:117] "RemoveContainer" containerID="b183c5c69fa2e6cff7dd5a97eb591e8d23a80510f008b8368fa1c71b8a19f60e" Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.029067 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.040878 4921 reconciler_common.go:293] "Volume detached for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" DevicePath \"\"" Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.040905 4921 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/e098cd5a-992f-42a0-a89e-d8dd59dbbcc5-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" 
Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.054443 4921 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.070744 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Dec 10 13:17:41 crc kubenswrapper[4921]: E1210 13:17:41.071128 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c651083f-4dd3-4963-892f-ddbc5ef1af05" containerName="rabbitmq" Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.071145 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="c651083f-4dd3-4963-892f-ddbc5ef1af05" containerName="rabbitmq" Dec 10 13:17:41 crc kubenswrapper[4921]: E1210 13:17:41.071160 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e098cd5a-992f-42a0-a89e-d8dd59dbbcc5" containerName="rabbitmq" Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.071167 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="e098cd5a-992f-42a0-a89e-d8dd59dbbcc5" containerName="rabbitmq" Dec 10 13:17:41 crc kubenswrapper[4921]: E1210 13:17:41.071179 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e098cd5a-992f-42a0-a89e-d8dd59dbbcc5" containerName="setup-container" Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.071185 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="e098cd5a-992f-42a0-a89e-d8dd59dbbcc5" containerName="setup-container" Dec 10 13:17:41 crc kubenswrapper[4921]: E1210 13:17:41.071201 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c651083f-4dd3-4963-892f-ddbc5ef1af05" containerName="setup-container" Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.071207 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="c651083f-4dd3-4963-892f-ddbc5ef1af05" containerName="setup-container" Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.071387 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="c651083f-4dd3-4963-892f-ddbc5ef1af05" containerName="rabbitmq" Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.071419 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="e098cd5a-992f-42a0-a89e-d8dd59dbbcc5" containerName="rabbitmq" Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.072369 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.075379 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.076568 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-2k2nw" Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.077235 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.077407 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.077515 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.077656 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.077753 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.077921 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.117545 4921 scope.go:117] "RemoveContainer" containerID="ceab0d7e68ee169e4cbcf90d582662eb65c586d8bb19ae2df9006de3739acd44" Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.159565 4921 scope.go:117] "RemoveContainer" containerID="b183c5c69fa2e6cff7dd5a97eb591e8d23a80510f008b8368fa1c71b8a19f60e" Dec 10 13:17:41 crc kubenswrapper[4921]: E1210 13:17:41.160135 4921 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b183c5c69fa2e6cff7dd5a97eb591e8d23a80510f008b8368fa1c71b8a19f60e\": container with ID starting with b183c5c69fa2e6cff7dd5a97eb591e8d23a80510f008b8368fa1c71b8a19f60e not found: ID does not exist" containerID="b183c5c69fa2e6cff7dd5a97eb591e8d23a80510f008b8368fa1c71b8a19f60e" Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.160168 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b183c5c69fa2e6cff7dd5a97eb591e8d23a80510f008b8368fa1c71b8a19f60e"} err="failed to get container status \"b183c5c69fa2e6cff7dd5a97eb591e8d23a80510f008b8368fa1c71b8a19f60e\": rpc error: code = NotFound desc = could not find container \"b183c5c69fa2e6cff7dd5a97eb591e8d23a80510f008b8368fa1c71b8a19f60e\": container with ID starting with b183c5c69fa2e6cff7dd5a97eb591e8d23a80510f008b8368fa1c71b8a19f60e not found: ID does not exist" Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.160193 4921 scope.go:117] "RemoveContainer" containerID="ceab0d7e68ee169e4cbcf90d582662eb65c586d8bb19ae2df9006de3739acd44" Dec 10 13:17:41 crc kubenswrapper[4921]: E1210 13:17:41.160414 4921 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ceab0d7e68ee169e4cbcf90d582662eb65c586d8bb19ae2df9006de3739acd44\": container with ID starting with ceab0d7e68ee169e4cbcf90d582662eb65c586d8bb19ae2df9006de3739acd44 not found: ID does not exist" containerID="ceab0d7e68ee169e4cbcf90d582662eb65c586d8bb19ae2df9006de3739acd44" Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.160444 4921 pod_container_deletor.go:53] 
"DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ceab0d7e68ee169e4cbcf90d582662eb65c586d8bb19ae2df9006de3739acd44"} err="failed to get container status \"ceab0d7e68ee169e4cbcf90d582662eb65c586d8bb19ae2df9006de3739acd44\": rpc error: code = NotFound desc = could not find container \"ceab0d7e68ee169e4cbcf90d582662eb65c586d8bb19ae2df9006de3739acd44\": container with ID starting with ceab0d7e68ee169e4cbcf90d582662eb65c586d8bb19ae2df9006de3739acd44 not found: ID does not exist" Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.203130 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c651083f-4dd3-4963-892f-ddbc5ef1af05" path="/var/lib/kubelet/pods/c651083f-4dd3-4963-892f-ddbc5ef1af05/volumes" Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.244434 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/763f1047-618c-4167-9d5d-27d387e8adf4-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"763f1047-618c-4167-9d5d-27d387e8adf4\") " pod="openstack/rabbitmq-server-0" Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.244540 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pj5p9\" (UniqueName: \"kubernetes.io/projected/763f1047-618c-4167-9d5d-27d387e8adf4-kube-api-access-pj5p9\") pod \"rabbitmq-server-0\" (UID: \"763f1047-618c-4167-9d5d-27d387e8adf4\") " pod="openstack/rabbitmq-server-0" Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.244570 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-server-0\" (UID: \"763f1047-618c-4167-9d5d-27d387e8adf4\") " pod="openstack/rabbitmq-server-0" Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.244599 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/763f1047-618c-4167-9d5d-27d387e8adf4-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"763f1047-618c-4167-9d5d-27d387e8adf4\") " pod="openstack/rabbitmq-server-0" Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.244791 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/763f1047-618c-4167-9d5d-27d387e8adf4-config-data\") pod \"rabbitmq-server-0\" (UID: \"763f1047-618c-4167-9d5d-27d387e8adf4\") " pod="openstack/rabbitmq-server-0" Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.244860 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/763f1047-618c-4167-9d5d-27d387e8adf4-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"763f1047-618c-4167-9d5d-27d387e8adf4\") " pod="openstack/rabbitmq-server-0" Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.244926 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/763f1047-618c-4167-9d5d-27d387e8adf4-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"763f1047-618c-4167-9d5d-27d387e8adf4\") " pod="openstack/rabbitmq-server-0" Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.244972 4921 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/763f1047-618c-4167-9d5d-27d387e8adf4-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"763f1047-618c-4167-9d5d-27d387e8adf4\") " pod="openstack/rabbitmq-server-0" Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.245077 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/763f1047-618c-4167-9d5d-27d387e8adf4-pod-info\") pod \"rabbitmq-server-0\" (UID: \"763f1047-618c-4167-9d5d-27d387e8adf4\") " pod="openstack/rabbitmq-server-0" Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.245123 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/763f1047-618c-4167-9d5d-27d387e8adf4-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"763f1047-618c-4167-9d5d-27d387e8adf4\") " pod="openstack/rabbitmq-server-0" Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.245169 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/763f1047-618c-4167-9d5d-27d387e8adf4-server-conf\") pod \"rabbitmq-server-0\" (UID: \"763f1047-618c-4167-9d5d-27d387e8adf4\") " pod="openstack/rabbitmq-server-0" Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.287368 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.297054 4921 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.310359 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.313363 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.321300 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.321613 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.321664 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.321784 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-fzqbd" Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.321821 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.321902 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.321539 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.347204 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/763f1047-618c-4167-9d5d-27d387e8adf4-server-conf\") pod \"rabbitmq-server-0\" (UID: \"763f1047-618c-4167-9d5d-27d387e8adf4\") " pod="openstack/rabbitmq-server-0" Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.347257 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/763f1047-618c-4167-9d5d-27d387e8adf4-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"763f1047-618c-4167-9d5d-27d387e8adf4\") " pod="openstack/rabbitmq-server-0" Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.347325 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pj5p9\" (UniqueName: \"kubernetes.io/projected/763f1047-618c-4167-9d5d-27d387e8adf4-kube-api-access-pj5p9\") pod \"rabbitmq-server-0\" (UID: \"763f1047-618c-4167-9d5d-27d387e8adf4\") " pod="openstack/rabbitmq-server-0" Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.347344 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-server-0\" (UID: \"763f1047-618c-4167-9d5d-27d387e8adf4\") " pod="openstack/rabbitmq-server-0" Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.347368 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/763f1047-618c-4167-9d5d-27d387e8adf4-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"763f1047-618c-4167-9d5d-27d387e8adf4\") " pod="openstack/rabbitmq-server-0" Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.347424 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/763f1047-618c-4167-9d5d-27d387e8adf4-config-data\") pod \"rabbitmq-server-0\" (UID: \"763f1047-618c-4167-9d5d-27d387e8adf4\") " pod="openstack/rabbitmq-server-0" Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.347445 4921 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/763f1047-618c-4167-9d5d-27d387e8adf4-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"763f1047-618c-4167-9d5d-27d387e8adf4\") " pod="openstack/rabbitmq-server-0" Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.347461 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/763f1047-618c-4167-9d5d-27d387e8adf4-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"763f1047-618c-4167-9d5d-27d387e8adf4\") " pod="openstack/rabbitmq-server-0" Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.347482 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/763f1047-618c-4167-9d5d-27d387e8adf4-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"763f1047-618c-4167-9d5d-27d387e8adf4\") " pod="openstack/rabbitmq-server-0" Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.347505 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/763f1047-618c-4167-9d5d-27d387e8adf4-pod-info\") pod \"rabbitmq-server-0\" (UID: \"763f1047-618c-4167-9d5d-27d387e8adf4\") " pod="openstack/rabbitmq-server-0" Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.347504 4921 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-server-0\" (UID: \"763f1047-618c-4167-9d5d-27d387e8adf4\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/rabbitmq-server-0" Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.347522 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/763f1047-618c-4167-9d5d-27d387e8adf4-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"763f1047-618c-4167-9d5d-27d387e8adf4\") " pod="openstack/rabbitmq-server-0" Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.349841 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/763f1047-618c-4167-9d5d-27d387e8adf4-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"763f1047-618c-4167-9d5d-27d387e8adf4\") " pod="openstack/rabbitmq-server-0" Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.349995 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/763f1047-618c-4167-9d5d-27d387e8adf4-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"763f1047-618c-4167-9d5d-27d387e8adf4\") " pod="openstack/rabbitmq-server-0" Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.352197 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/763f1047-618c-4167-9d5d-27d387e8adf4-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"763f1047-618c-4167-9d5d-27d387e8adf4\") " pod="openstack/rabbitmq-server-0" Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.352775 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/763f1047-618c-4167-9d5d-27d387e8adf4-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"763f1047-618c-4167-9d5d-27d387e8adf4\") " 
pod="openstack/rabbitmq-server-0" Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.353245 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/763f1047-618c-4167-9d5d-27d387e8adf4-server-conf\") pod \"rabbitmq-server-0\" (UID: \"763f1047-618c-4167-9d5d-27d387e8adf4\") " pod="openstack/rabbitmq-server-0" Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.353657 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.355237 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/763f1047-618c-4167-9d5d-27d387e8adf4-config-data\") pod \"rabbitmq-server-0\" (UID: \"763f1047-618c-4167-9d5d-27d387e8adf4\") " pod="openstack/rabbitmq-server-0" Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.355889 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/763f1047-618c-4167-9d5d-27d387e8adf4-pod-info\") pod \"rabbitmq-server-0\" (UID: \"763f1047-618c-4167-9d5d-27d387e8adf4\") " pod="openstack/rabbitmq-server-0" Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.369407 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/763f1047-618c-4167-9d5d-27d387e8adf4-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"763f1047-618c-4167-9d5d-27d387e8adf4\") " pod="openstack/rabbitmq-server-0" Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.384744 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pj5p9\" (UniqueName: \"kubernetes.io/projected/763f1047-618c-4167-9d5d-27d387e8adf4-kube-api-access-pj5p9\") pod \"rabbitmq-server-0\" (UID: \"763f1047-618c-4167-9d5d-27d387e8adf4\") " pod="openstack/rabbitmq-server-0" Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.396016 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/763f1047-618c-4167-9d5d-27d387e8adf4-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"763f1047-618c-4167-9d5d-27d387e8adf4\") " pod="openstack/rabbitmq-server-0" Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.413081 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-server-0\" (UID: \"763f1047-618c-4167-9d5d-27d387e8adf4\") " pod="openstack/rabbitmq-server-0" Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.448729 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"695d1b33-f9c1-44bb-b41d-9f6de71c3527\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.448798 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/695d1b33-f9c1-44bb-b41d-9f6de71c3527-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"695d1b33-f9c1-44bb-b41d-9f6de71c3527\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.448818 4921 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/695d1b33-f9c1-44bb-b41d-9f6de71c3527-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"695d1b33-f9c1-44bb-b41d-9f6de71c3527\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.448884 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/695d1b33-f9c1-44bb-b41d-9f6de71c3527-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"695d1b33-f9c1-44bb-b41d-9f6de71c3527\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.448905 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/695d1b33-f9c1-44bb-b41d-9f6de71c3527-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"695d1b33-f9c1-44bb-b41d-9f6de71c3527\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.448931 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/695d1b33-f9c1-44bb-b41d-9f6de71c3527-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"695d1b33-f9c1-44bb-b41d-9f6de71c3527\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.448961 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/695d1b33-f9c1-44bb-b41d-9f6de71c3527-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"695d1b33-f9c1-44bb-b41d-9f6de71c3527\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.448983 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/695d1b33-f9c1-44bb-b41d-9f6de71c3527-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"695d1b33-f9c1-44bb-b41d-9f6de71c3527\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.449023 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/695d1b33-f9c1-44bb-b41d-9f6de71c3527-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"695d1b33-f9c1-44bb-b41d-9f6de71c3527\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.449042 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ngcf6\" (UniqueName: \"kubernetes.io/projected/695d1b33-f9c1-44bb-b41d-9f6de71c3527-kube-api-access-ngcf6\") pod \"rabbitmq-cell1-server-0\" (UID: \"695d1b33-f9c1-44bb-b41d-9f6de71c3527\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.449080 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/695d1b33-f9c1-44bb-b41d-9f6de71c3527-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"695d1b33-f9c1-44bb-b41d-9f6de71c3527\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 
13:17:41.550229 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/695d1b33-f9c1-44bb-b41d-9f6de71c3527-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"695d1b33-f9c1-44bb-b41d-9f6de71c3527\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.550283 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/695d1b33-f9c1-44bb-b41d-9f6de71c3527-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"695d1b33-f9c1-44bb-b41d-9f6de71c3527\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.550304 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/695d1b33-f9c1-44bb-b41d-9f6de71c3527-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"695d1b33-f9c1-44bb-b41d-9f6de71c3527\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.550351 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/695d1b33-f9c1-44bb-b41d-9f6de71c3527-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"695d1b33-f9c1-44bb-b41d-9f6de71c3527\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.550374 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ngcf6\" (UniqueName: \"kubernetes.io/projected/695d1b33-f9c1-44bb-b41d-9f6de71c3527-kube-api-access-ngcf6\") pod \"rabbitmq-cell1-server-0\" (UID: \"695d1b33-f9c1-44bb-b41d-9f6de71c3527\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.550429 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/695d1b33-f9c1-44bb-b41d-9f6de71c3527-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"695d1b33-f9c1-44bb-b41d-9f6de71c3527\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.550448 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"695d1b33-f9c1-44bb-b41d-9f6de71c3527\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.550475 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/695d1b33-f9c1-44bb-b41d-9f6de71c3527-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"695d1b33-f9c1-44bb-b41d-9f6de71c3527\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.550493 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/695d1b33-f9c1-44bb-b41d-9f6de71c3527-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"695d1b33-f9c1-44bb-b41d-9f6de71c3527\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.550525 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: 
\"kubernetes.io/configmap/695d1b33-f9c1-44bb-b41d-9f6de71c3527-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"695d1b33-f9c1-44bb-b41d-9f6de71c3527\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.550541 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/695d1b33-f9c1-44bb-b41d-9f6de71c3527-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"695d1b33-f9c1-44bb-b41d-9f6de71c3527\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.552495 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/695d1b33-f9c1-44bb-b41d-9f6de71c3527-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"695d1b33-f9c1-44bb-b41d-9f6de71c3527\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.552766 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/695d1b33-f9c1-44bb-b41d-9f6de71c3527-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"695d1b33-f9c1-44bb-b41d-9f6de71c3527\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.553699 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/695d1b33-f9c1-44bb-b41d-9f6de71c3527-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"695d1b33-f9c1-44bb-b41d-9f6de71c3527\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.553853 4921 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"695d1b33-f9c1-44bb-b41d-9f6de71c3527\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/rabbitmq-cell1-server-0" Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.557507 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/695d1b33-f9c1-44bb-b41d-9f6de71c3527-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"695d1b33-f9c1-44bb-b41d-9f6de71c3527\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.557857 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/695d1b33-f9c1-44bb-b41d-9f6de71c3527-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"695d1b33-f9c1-44bb-b41d-9f6de71c3527\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.557974 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/695d1b33-f9c1-44bb-b41d-9f6de71c3527-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"695d1b33-f9c1-44bb-b41d-9f6de71c3527\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.558539 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/695d1b33-f9c1-44bb-b41d-9f6de71c3527-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"695d1b33-f9c1-44bb-b41d-9f6de71c3527\") " 
pod="openstack/rabbitmq-cell1-server-0" Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.560465 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/695d1b33-f9c1-44bb-b41d-9f6de71c3527-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"695d1b33-f9c1-44bb-b41d-9f6de71c3527\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.574150 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/695d1b33-f9c1-44bb-b41d-9f6de71c3527-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"695d1b33-f9c1-44bb-b41d-9f6de71c3527\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.578670 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ngcf6\" (UniqueName: \"kubernetes.io/projected/695d1b33-f9c1-44bb-b41d-9f6de71c3527-kube-api-access-ngcf6\") pod \"rabbitmq-cell1-server-0\" (UID: \"695d1b33-f9c1-44bb-b41d-9f6de71c3527\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.580352 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"695d1b33-f9c1-44bb-b41d-9f6de71c3527\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.629700 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 10 13:17:41 crc kubenswrapper[4921]: I1210 13:17:41.706226 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 10 13:17:42 crc kubenswrapper[4921]: I1210 13:17:42.089192 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 10 13:17:42 crc kubenswrapper[4921]: I1210 13:17:42.237373 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 10 13:17:43 crc kubenswrapper[4921]: I1210 13:17:43.001497 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"695d1b33-f9c1-44bb-b41d-9f6de71c3527","Type":"ContainerStarted","Data":"219399dfdd9d3593db27fb07d826d7a9f91ea666a3ec9434dee84df34d670014"} Dec 10 13:17:43 crc kubenswrapper[4921]: I1210 13:17:43.002758 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"763f1047-618c-4167-9d5d-27d387e8adf4","Type":"ContainerStarted","Data":"3c501940f0596af3ed9a5d66848bd144d6b1e634e9bc4ea460b010667a922f0e"} Dec 10 13:17:43 crc kubenswrapper[4921]: I1210 13:17:43.204983 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e098cd5a-992f-42a0-a89e-d8dd59dbbcc5" path="/var/lib/kubelet/pods/e098cd5a-992f-42a0-a89e-d8dd59dbbcc5/volumes" Dec 10 13:17:44 crc kubenswrapper[4921]: I1210 13:17:44.011877 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"763f1047-618c-4167-9d5d-27d387e8adf4","Type":"ContainerStarted","Data":"f6778fd93feffe10e6d77fb585dfab81721c69ee53b3dcdc24eae2b5d40c0e17"} Dec 10 13:17:44 crc kubenswrapper[4921]: I1210 13:17:44.013582 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"695d1b33-f9c1-44bb-b41d-9f6de71c3527","Type":"ContainerStarted","Data":"4d35cdb45e23249663f7e787ff6f50edbf0946a5230a41a4527a8b8df37d69f7"} Dec 10 13:17:45 crc kubenswrapper[4921]: I1210 13:17:45.586605 4921 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="e098cd5a-992f-42a0-a89e-d8dd59dbbcc5" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.97:5671: i/o timeout" Dec 10 13:17:48 crc kubenswrapper[4921]: I1210 13:17:48.069708 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-578b8d767c-nr89k"] Dec 10 13:17:48 crc kubenswrapper[4921]: I1210 13:17:48.071857 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-578b8d767c-nr89k" Dec 10 13:17:48 crc kubenswrapper[4921]: I1210 13:17:48.077358 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-edpm-ipam" Dec 10 13:17:48 crc kubenswrapper[4921]: I1210 13:17:48.087877 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-578b8d767c-nr89k"] Dec 10 13:17:48 crc kubenswrapper[4921]: I1210 13:17:48.171871 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1a1ec8c2-c5a2-4bae-8951-95348df9974d-ovsdbserver-nb\") pod \"dnsmasq-dns-578b8d767c-nr89k\" (UID: \"1a1ec8c2-c5a2-4bae-8951-95348df9974d\") " pod="openstack/dnsmasq-dns-578b8d767c-nr89k" Dec 10 13:17:48 crc kubenswrapper[4921]: I1210 13:17:48.172206 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1a1ec8c2-c5a2-4bae-8951-95348df9974d-ovsdbserver-sb\") pod \"dnsmasq-dns-578b8d767c-nr89k\" (UID: \"1a1ec8c2-c5a2-4bae-8951-95348df9974d\") " pod="openstack/dnsmasq-dns-578b8d767c-nr89k" Dec 10 13:17:48 crc kubenswrapper[4921]: I1210 13:17:48.172231 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wp55v\" (UniqueName: \"kubernetes.io/projected/1a1ec8c2-c5a2-4bae-8951-95348df9974d-kube-api-access-wp55v\") pod \"dnsmasq-dns-578b8d767c-nr89k\" (UID: \"1a1ec8c2-c5a2-4bae-8951-95348df9974d\") " pod="openstack/dnsmasq-dns-578b8d767c-nr89k" Dec 10 13:17:48 crc kubenswrapper[4921]: I1210 13:17:48.172268 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/1a1ec8c2-c5a2-4bae-8951-95348df9974d-openstack-edpm-ipam\") pod \"dnsmasq-dns-578b8d767c-nr89k\" (UID: \"1a1ec8c2-c5a2-4bae-8951-95348df9974d\") " pod="openstack/dnsmasq-dns-578b8d767c-nr89k" Dec 10 13:17:48 crc kubenswrapper[4921]: I1210 13:17:48.172355 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a1ec8c2-c5a2-4bae-8951-95348df9974d-config\") pod \"dnsmasq-dns-578b8d767c-nr89k\" (UID: \"1a1ec8c2-c5a2-4bae-8951-95348df9974d\") " pod="openstack/dnsmasq-dns-578b8d767c-nr89k" Dec 10 13:17:48 crc kubenswrapper[4921]: I1210 13:17:48.172443 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1a1ec8c2-c5a2-4bae-8951-95348df9974d-dns-svc\") pod \"dnsmasq-dns-578b8d767c-nr89k\" (UID: \"1a1ec8c2-c5a2-4bae-8951-95348df9974d\") " pod="openstack/dnsmasq-dns-578b8d767c-nr89k" Dec 10 13:17:48 crc kubenswrapper[4921]: I1210 13:17:48.273691 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a1ec8c2-c5a2-4bae-8951-95348df9974d-config\") pod \"dnsmasq-dns-578b8d767c-nr89k\" (UID: \"1a1ec8c2-c5a2-4bae-8951-95348df9974d\") " pod="openstack/dnsmasq-dns-578b8d767c-nr89k" Dec 10 13:17:48 crc kubenswrapper[4921]: I1210 13:17:48.274568 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a1ec8c2-c5a2-4bae-8951-95348df9974d-config\") pod \"dnsmasq-dns-578b8d767c-nr89k\" (UID: \"1a1ec8c2-c5a2-4bae-8951-95348df9974d\") " 
pod="openstack/dnsmasq-dns-578b8d767c-nr89k" Dec 10 13:17:48 crc kubenswrapper[4921]: I1210 13:17:48.274777 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1a1ec8c2-c5a2-4bae-8951-95348df9974d-dns-svc\") pod \"dnsmasq-dns-578b8d767c-nr89k\" (UID: \"1a1ec8c2-c5a2-4bae-8951-95348df9974d\") " pod="openstack/dnsmasq-dns-578b8d767c-nr89k" Dec 10 13:17:48 crc kubenswrapper[4921]: I1210 13:17:48.274880 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1a1ec8c2-c5a2-4bae-8951-95348df9974d-ovsdbserver-nb\") pod \"dnsmasq-dns-578b8d767c-nr89k\" (UID: \"1a1ec8c2-c5a2-4bae-8951-95348df9974d\") " pod="openstack/dnsmasq-dns-578b8d767c-nr89k" Dec 10 13:17:48 crc kubenswrapper[4921]: I1210 13:17:48.274914 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1a1ec8c2-c5a2-4bae-8951-95348df9974d-ovsdbserver-sb\") pod \"dnsmasq-dns-578b8d767c-nr89k\" (UID: \"1a1ec8c2-c5a2-4bae-8951-95348df9974d\") " pod="openstack/dnsmasq-dns-578b8d767c-nr89k" Dec 10 13:17:48 crc kubenswrapper[4921]: I1210 13:17:48.274942 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wp55v\" (UniqueName: \"kubernetes.io/projected/1a1ec8c2-c5a2-4bae-8951-95348df9974d-kube-api-access-wp55v\") pod \"dnsmasq-dns-578b8d767c-nr89k\" (UID: \"1a1ec8c2-c5a2-4bae-8951-95348df9974d\") " pod="openstack/dnsmasq-dns-578b8d767c-nr89k" Dec 10 13:17:48 crc kubenswrapper[4921]: I1210 13:17:48.275055 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/1a1ec8c2-c5a2-4bae-8951-95348df9974d-openstack-edpm-ipam\") pod \"dnsmasq-dns-578b8d767c-nr89k\" (UID: \"1a1ec8c2-c5a2-4bae-8951-95348df9974d\") " pod="openstack/dnsmasq-dns-578b8d767c-nr89k" Dec 10 13:17:48 crc kubenswrapper[4921]: I1210 13:17:48.276153 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1a1ec8c2-c5a2-4bae-8951-95348df9974d-dns-svc\") pod \"dnsmasq-dns-578b8d767c-nr89k\" (UID: \"1a1ec8c2-c5a2-4bae-8951-95348df9974d\") " pod="openstack/dnsmasq-dns-578b8d767c-nr89k" Dec 10 13:17:48 crc kubenswrapper[4921]: I1210 13:17:48.276602 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1a1ec8c2-c5a2-4bae-8951-95348df9974d-ovsdbserver-sb\") pod \"dnsmasq-dns-578b8d767c-nr89k\" (UID: \"1a1ec8c2-c5a2-4bae-8951-95348df9974d\") " pod="openstack/dnsmasq-dns-578b8d767c-nr89k" Dec 10 13:17:48 crc kubenswrapper[4921]: I1210 13:17:48.276784 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/1a1ec8c2-c5a2-4bae-8951-95348df9974d-openstack-edpm-ipam\") pod \"dnsmasq-dns-578b8d767c-nr89k\" (UID: \"1a1ec8c2-c5a2-4bae-8951-95348df9974d\") " pod="openstack/dnsmasq-dns-578b8d767c-nr89k" Dec 10 13:17:48 crc kubenswrapper[4921]: I1210 13:17:48.276890 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1a1ec8c2-c5a2-4bae-8951-95348df9974d-ovsdbserver-nb\") pod \"dnsmasq-dns-578b8d767c-nr89k\" (UID: \"1a1ec8c2-c5a2-4bae-8951-95348df9974d\") " pod="openstack/dnsmasq-dns-578b8d767c-nr89k" Dec 10 13:17:48 crc 
kubenswrapper[4921]: I1210 13:17:48.296286 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wp55v\" (UniqueName: \"kubernetes.io/projected/1a1ec8c2-c5a2-4bae-8951-95348df9974d-kube-api-access-wp55v\") pod \"dnsmasq-dns-578b8d767c-nr89k\" (UID: \"1a1ec8c2-c5a2-4bae-8951-95348df9974d\") " pod="openstack/dnsmasq-dns-578b8d767c-nr89k" Dec 10 13:17:48 crc kubenswrapper[4921]: I1210 13:17:48.391306 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-578b8d767c-nr89k" Dec 10 13:17:48 crc kubenswrapper[4921]: I1210 13:17:48.908720 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-578b8d767c-nr89k"] Dec 10 13:17:49 crc kubenswrapper[4921]: I1210 13:17:49.075953 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-578b8d767c-nr89k" event={"ID":"1a1ec8c2-c5a2-4bae-8951-95348df9974d","Type":"ContainerStarted","Data":"3a4877226b3636eb4d23064f5be8a71033c6779b637c99a95357db8f79794538"} Dec 10 13:17:50 crc kubenswrapper[4921]: I1210 13:17:50.085099 4921 generic.go:334] "Generic (PLEG): container finished" podID="1a1ec8c2-c5a2-4bae-8951-95348df9974d" containerID="c7df3f02d9727fdeb3675cbc5f631ddf67c5694234033e741ee9d01ad1e5d0b2" exitCode=0 Dec 10 13:17:50 crc kubenswrapper[4921]: I1210 13:17:50.085141 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-578b8d767c-nr89k" event={"ID":"1a1ec8c2-c5a2-4bae-8951-95348df9974d","Type":"ContainerDied","Data":"c7df3f02d9727fdeb3675cbc5f631ddf67c5694234033e741ee9d01ad1e5d0b2"} Dec 10 13:17:51 crc kubenswrapper[4921]: I1210 13:17:51.097372 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-578b8d767c-nr89k" event={"ID":"1a1ec8c2-c5a2-4bae-8951-95348df9974d","Type":"ContainerStarted","Data":"d38a8fd1642182cb9d10a65742595caf46364fe02ef2252808b0e42de1dc3218"} Dec 10 13:17:51 crc kubenswrapper[4921]: I1210 13:17:51.097811 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-578b8d767c-nr89k" Dec 10 13:17:51 crc kubenswrapper[4921]: I1210 13:17:51.128918 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-578b8d767c-nr89k" podStartSLOduration=3.128900068 podStartE2EDuration="3.128900068s" podCreationTimestamp="2025-12-10 13:17:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 13:17:51.127711096 +0000 UTC m=+1268.343933030" watchObservedRunningTime="2025-12-10 13:17:51.128900068 +0000 UTC m=+1268.345121992" Dec 10 13:17:58 crc kubenswrapper[4921]: I1210 13:17:58.392628 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-578b8d767c-nr89k" Dec 10 13:17:58 crc kubenswrapper[4921]: I1210 13:17:58.464943 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-68d4b6d797-5tzmd"] Dec 10 13:17:58 crc kubenswrapper[4921]: I1210 13:17:58.465161 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-68d4b6d797-5tzmd" podUID="6af75187-fe5e-4c40-9ae4-ab5c70913328" containerName="dnsmasq-dns" containerID="cri-o://0e05c3e05f7e70b42d5b9dbed23ae0fa2abba889befb272d63fe15f221e2e64c" gracePeriod=10 Dec 10 13:17:58 crc kubenswrapper[4921]: I1210 13:17:58.660647 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-667ff9c869-rtdkp"] Dec 10 13:17:58 crc 
kubenswrapper[4921]: I1210 13:17:58.670473 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-667ff9c869-rtdkp" Dec 10 13:17:58 crc kubenswrapper[4921]: I1210 13:17:58.681628 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-667ff9c869-rtdkp"] Dec 10 13:17:58 crc kubenswrapper[4921]: I1210 13:17:58.711896 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tbpv2\" (UniqueName: \"kubernetes.io/projected/16952130-d663-4f2f-bb74-724fac2ec6dd-kube-api-access-tbpv2\") pod \"dnsmasq-dns-667ff9c869-rtdkp\" (UID: \"16952130-d663-4f2f-bb74-724fac2ec6dd\") " pod="openstack/dnsmasq-dns-667ff9c869-rtdkp" Dec 10 13:17:58 crc kubenswrapper[4921]: I1210 13:17:58.712005 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/16952130-d663-4f2f-bb74-724fac2ec6dd-ovsdbserver-nb\") pod \"dnsmasq-dns-667ff9c869-rtdkp\" (UID: \"16952130-d663-4f2f-bb74-724fac2ec6dd\") " pod="openstack/dnsmasq-dns-667ff9c869-rtdkp" Dec 10 13:17:58 crc kubenswrapper[4921]: I1210 13:17:58.712026 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/16952130-d663-4f2f-bb74-724fac2ec6dd-ovsdbserver-sb\") pod \"dnsmasq-dns-667ff9c869-rtdkp\" (UID: \"16952130-d663-4f2f-bb74-724fac2ec6dd\") " pod="openstack/dnsmasq-dns-667ff9c869-rtdkp" Dec 10 13:17:58 crc kubenswrapper[4921]: I1210 13:17:58.712049 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/16952130-d663-4f2f-bb74-724fac2ec6dd-openstack-edpm-ipam\") pod \"dnsmasq-dns-667ff9c869-rtdkp\" (UID: \"16952130-d663-4f2f-bb74-724fac2ec6dd\") " pod="openstack/dnsmasq-dns-667ff9c869-rtdkp" Dec 10 13:17:58 crc kubenswrapper[4921]: I1210 13:17:58.712187 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/16952130-d663-4f2f-bb74-724fac2ec6dd-dns-svc\") pod \"dnsmasq-dns-667ff9c869-rtdkp\" (UID: \"16952130-d663-4f2f-bb74-724fac2ec6dd\") " pod="openstack/dnsmasq-dns-667ff9c869-rtdkp" Dec 10 13:17:58 crc kubenswrapper[4921]: I1210 13:17:58.712215 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/16952130-d663-4f2f-bb74-724fac2ec6dd-config\") pod \"dnsmasq-dns-667ff9c869-rtdkp\" (UID: \"16952130-d663-4f2f-bb74-724fac2ec6dd\") " pod="openstack/dnsmasq-dns-667ff9c869-rtdkp" Dec 10 13:17:58 crc kubenswrapper[4921]: I1210 13:17:58.813952 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tbpv2\" (UniqueName: \"kubernetes.io/projected/16952130-d663-4f2f-bb74-724fac2ec6dd-kube-api-access-tbpv2\") pod \"dnsmasq-dns-667ff9c869-rtdkp\" (UID: \"16952130-d663-4f2f-bb74-724fac2ec6dd\") " pod="openstack/dnsmasq-dns-667ff9c869-rtdkp" Dec 10 13:17:58 crc kubenswrapper[4921]: I1210 13:17:58.814034 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/16952130-d663-4f2f-bb74-724fac2ec6dd-ovsdbserver-nb\") pod \"dnsmasq-dns-667ff9c869-rtdkp\" (UID: \"16952130-d663-4f2f-bb74-724fac2ec6dd\") " 
pod="openstack/dnsmasq-dns-667ff9c869-rtdkp" Dec 10 13:17:58 crc kubenswrapper[4921]: I1210 13:17:58.814051 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/16952130-d663-4f2f-bb74-724fac2ec6dd-ovsdbserver-sb\") pod \"dnsmasq-dns-667ff9c869-rtdkp\" (UID: \"16952130-d663-4f2f-bb74-724fac2ec6dd\") " pod="openstack/dnsmasq-dns-667ff9c869-rtdkp" Dec 10 13:17:58 crc kubenswrapper[4921]: I1210 13:17:58.814071 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/16952130-d663-4f2f-bb74-724fac2ec6dd-openstack-edpm-ipam\") pod \"dnsmasq-dns-667ff9c869-rtdkp\" (UID: \"16952130-d663-4f2f-bb74-724fac2ec6dd\") " pod="openstack/dnsmasq-dns-667ff9c869-rtdkp" Dec 10 13:17:58 crc kubenswrapper[4921]: I1210 13:17:58.814137 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/16952130-d663-4f2f-bb74-724fac2ec6dd-dns-svc\") pod \"dnsmasq-dns-667ff9c869-rtdkp\" (UID: \"16952130-d663-4f2f-bb74-724fac2ec6dd\") " pod="openstack/dnsmasq-dns-667ff9c869-rtdkp" Dec 10 13:17:58 crc kubenswrapper[4921]: I1210 13:17:58.814167 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/16952130-d663-4f2f-bb74-724fac2ec6dd-config\") pod \"dnsmasq-dns-667ff9c869-rtdkp\" (UID: \"16952130-d663-4f2f-bb74-724fac2ec6dd\") " pod="openstack/dnsmasq-dns-667ff9c869-rtdkp" Dec 10 13:17:58 crc kubenswrapper[4921]: I1210 13:17:58.815835 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/16952130-d663-4f2f-bb74-724fac2ec6dd-ovsdbserver-sb\") pod \"dnsmasq-dns-667ff9c869-rtdkp\" (UID: \"16952130-d663-4f2f-bb74-724fac2ec6dd\") " pod="openstack/dnsmasq-dns-667ff9c869-rtdkp" Dec 10 13:17:58 crc kubenswrapper[4921]: I1210 13:17:58.816031 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/16952130-d663-4f2f-bb74-724fac2ec6dd-config\") pod \"dnsmasq-dns-667ff9c869-rtdkp\" (UID: \"16952130-d663-4f2f-bb74-724fac2ec6dd\") " pod="openstack/dnsmasq-dns-667ff9c869-rtdkp" Dec 10 13:17:58 crc kubenswrapper[4921]: I1210 13:17:58.816675 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/16952130-d663-4f2f-bb74-724fac2ec6dd-dns-svc\") pod \"dnsmasq-dns-667ff9c869-rtdkp\" (UID: \"16952130-d663-4f2f-bb74-724fac2ec6dd\") " pod="openstack/dnsmasq-dns-667ff9c869-rtdkp" Dec 10 13:17:58 crc kubenswrapper[4921]: I1210 13:17:58.817044 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/16952130-d663-4f2f-bb74-724fac2ec6dd-ovsdbserver-nb\") pod \"dnsmasq-dns-667ff9c869-rtdkp\" (UID: \"16952130-d663-4f2f-bb74-724fac2ec6dd\") " pod="openstack/dnsmasq-dns-667ff9c869-rtdkp" Dec 10 13:17:58 crc kubenswrapper[4921]: I1210 13:17:58.817184 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/16952130-d663-4f2f-bb74-724fac2ec6dd-openstack-edpm-ipam\") pod \"dnsmasq-dns-667ff9c869-rtdkp\" (UID: \"16952130-d663-4f2f-bb74-724fac2ec6dd\") " pod="openstack/dnsmasq-dns-667ff9c869-rtdkp" Dec 10 13:17:58 crc kubenswrapper[4921]: I1210 13:17:58.855199 4921 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tbpv2\" (UniqueName: \"kubernetes.io/projected/16952130-d663-4f2f-bb74-724fac2ec6dd-kube-api-access-tbpv2\") pod \"dnsmasq-dns-667ff9c869-rtdkp\" (UID: \"16952130-d663-4f2f-bb74-724fac2ec6dd\") " pod="openstack/dnsmasq-dns-667ff9c869-rtdkp" Dec 10 13:17:58 crc kubenswrapper[4921]: I1210 13:17:58.996927 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-667ff9c869-rtdkp" Dec 10 13:17:59 crc kubenswrapper[4921]: I1210 13:17:59.000930 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-68d4b6d797-5tzmd" Dec 10 13:17:59 crc kubenswrapper[4921]: I1210 13:17:59.018291 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6af75187-fe5e-4c40-9ae4-ab5c70913328-ovsdbserver-nb\") pod \"6af75187-fe5e-4c40-9ae4-ab5c70913328\" (UID: \"6af75187-fe5e-4c40-9ae4-ab5c70913328\") " Dec 10 13:17:59 crc kubenswrapper[4921]: I1210 13:17:59.018359 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j74x8\" (UniqueName: \"kubernetes.io/projected/6af75187-fe5e-4c40-9ae4-ab5c70913328-kube-api-access-j74x8\") pod \"6af75187-fe5e-4c40-9ae4-ab5c70913328\" (UID: \"6af75187-fe5e-4c40-9ae4-ab5c70913328\") " Dec 10 13:17:59 crc kubenswrapper[4921]: I1210 13:17:59.018379 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6af75187-fe5e-4c40-9ae4-ab5c70913328-config\") pod \"6af75187-fe5e-4c40-9ae4-ab5c70913328\" (UID: \"6af75187-fe5e-4c40-9ae4-ab5c70913328\") " Dec 10 13:17:59 crc kubenswrapper[4921]: I1210 13:17:59.018467 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6af75187-fe5e-4c40-9ae4-ab5c70913328-dns-svc\") pod \"6af75187-fe5e-4c40-9ae4-ab5c70913328\" (UID: \"6af75187-fe5e-4c40-9ae4-ab5c70913328\") " Dec 10 13:17:59 crc kubenswrapper[4921]: I1210 13:17:59.018532 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6af75187-fe5e-4c40-9ae4-ab5c70913328-ovsdbserver-sb\") pod \"6af75187-fe5e-4c40-9ae4-ab5c70913328\" (UID: \"6af75187-fe5e-4c40-9ae4-ab5c70913328\") " Dec 10 13:17:59 crc kubenswrapper[4921]: I1210 13:17:59.023318 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6af75187-fe5e-4c40-9ae4-ab5c70913328-kube-api-access-j74x8" (OuterVolumeSpecName: "kube-api-access-j74x8") pod "6af75187-fe5e-4c40-9ae4-ab5c70913328" (UID: "6af75187-fe5e-4c40-9ae4-ab5c70913328"). InnerVolumeSpecName "kube-api-access-j74x8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 13:17:59 crc kubenswrapper[4921]: I1210 13:17:59.114031 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6af75187-fe5e-4c40-9ae4-ab5c70913328-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "6af75187-fe5e-4c40-9ae4-ab5c70913328" (UID: "6af75187-fe5e-4c40-9ae4-ab5c70913328"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 13:17:59 crc kubenswrapper[4921]: I1210 13:17:59.134070 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6af75187-fe5e-4c40-9ae4-ab5c70913328-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "6af75187-fe5e-4c40-9ae4-ab5c70913328" (UID: "6af75187-fe5e-4c40-9ae4-ab5c70913328"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 13:17:59 crc kubenswrapper[4921]: I1210 13:17:59.134454 4921 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6af75187-fe5e-4c40-9ae4-ab5c70913328-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 10 13:17:59 crc kubenswrapper[4921]: I1210 13:17:59.134481 4921 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6af75187-fe5e-4c40-9ae4-ab5c70913328-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 10 13:17:59 crc kubenswrapper[4921]: I1210 13:17:59.134503 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j74x8\" (UniqueName: \"kubernetes.io/projected/6af75187-fe5e-4c40-9ae4-ab5c70913328-kube-api-access-j74x8\") on node \"crc\" DevicePath \"\"" Dec 10 13:17:59 crc kubenswrapper[4921]: I1210 13:17:59.134992 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6af75187-fe5e-4c40-9ae4-ab5c70913328-config" (OuterVolumeSpecName: "config") pod "6af75187-fe5e-4c40-9ae4-ab5c70913328" (UID: "6af75187-fe5e-4c40-9ae4-ab5c70913328"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 13:17:59 crc kubenswrapper[4921]: I1210 13:17:59.147189 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6af75187-fe5e-4c40-9ae4-ab5c70913328-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "6af75187-fe5e-4c40-9ae4-ab5c70913328" (UID: "6af75187-fe5e-4c40-9ae4-ab5c70913328"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 13:17:59 crc kubenswrapper[4921]: I1210 13:17:59.187955 4921 generic.go:334] "Generic (PLEG): container finished" podID="6af75187-fe5e-4c40-9ae4-ab5c70913328" containerID="0e05c3e05f7e70b42d5b9dbed23ae0fa2abba889befb272d63fe15f221e2e64c" exitCode=0 Dec 10 13:17:59 crc kubenswrapper[4921]: I1210 13:17:59.188000 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-68d4b6d797-5tzmd" event={"ID":"6af75187-fe5e-4c40-9ae4-ab5c70913328","Type":"ContainerDied","Data":"0e05c3e05f7e70b42d5b9dbed23ae0fa2abba889befb272d63fe15f221e2e64c"} Dec 10 13:17:59 crc kubenswrapper[4921]: I1210 13:17:59.188025 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-68d4b6d797-5tzmd" event={"ID":"6af75187-fe5e-4c40-9ae4-ab5c70913328","Type":"ContainerDied","Data":"7407ba46870baf686a3180099e43bea72de3f517e7990cc5b3adca8290662212"} Dec 10 13:17:59 crc kubenswrapper[4921]: I1210 13:17:59.188042 4921 scope.go:117] "RemoveContainer" containerID="0e05c3e05f7e70b42d5b9dbed23ae0fa2abba889befb272d63fe15f221e2e64c" Dec 10 13:17:59 crc kubenswrapper[4921]: I1210 13:17:59.188161 4921 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-68d4b6d797-5tzmd" Dec 10 13:17:59 crc kubenswrapper[4921]: I1210 13:17:59.236233 4921 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6af75187-fe5e-4c40-9ae4-ab5c70913328-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 10 13:17:59 crc kubenswrapper[4921]: I1210 13:17:59.236270 4921 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6af75187-fe5e-4c40-9ae4-ab5c70913328-config\") on node \"crc\" DevicePath \"\"" Dec 10 13:17:59 crc kubenswrapper[4921]: I1210 13:17:59.239570 4921 scope.go:117] "RemoveContainer" containerID="22f148e50263c88342f743bb6ac79442e9dab129600f0958944288807dae9aa4" Dec 10 13:17:59 crc kubenswrapper[4921]: I1210 13:17:59.248767 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-68d4b6d797-5tzmd"] Dec 10 13:17:59 crc kubenswrapper[4921]: I1210 13:17:59.257592 4921 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-68d4b6d797-5tzmd"] Dec 10 13:17:59 crc kubenswrapper[4921]: I1210 13:17:59.268417 4921 scope.go:117] "RemoveContainer" containerID="0e05c3e05f7e70b42d5b9dbed23ae0fa2abba889befb272d63fe15f221e2e64c" Dec 10 13:17:59 crc kubenswrapper[4921]: E1210 13:17:59.268896 4921 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0e05c3e05f7e70b42d5b9dbed23ae0fa2abba889befb272d63fe15f221e2e64c\": container with ID starting with 0e05c3e05f7e70b42d5b9dbed23ae0fa2abba889befb272d63fe15f221e2e64c not found: ID does not exist" containerID="0e05c3e05f7e70b42d5b9dbed23ae0fa2abba889befb272d63fe15f221e2e64c" Dec 10 13:17:59 crc kubenswrapper[4921]: I1210 13:17:59.268945 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0e05c3e05f7e70b42d5b9dbed23ae0fa2abba889befb272d63fe15f221e2e64c"} err="failed to get container status \"0e05c3e05f7e70b42d5b9dbed23ae0fa2abba889befb272d63fe15f221e2e64c\": rpc error: code = NotFound desc = could not find container \"0e05c3e05f7e70b42d5b9dbed23ae0fa2abba889befb272d63fe15f221e2e64c\": container with ID starting with 0e05c3e05f7e70b42d5b9dbed23ae0fa2abba889befb272d63fe15f221e2e64c not found: ID does not exist" Dec 10 13:17:59 crc kubenswrapper[4921]: I1210 13:17:59.268971 4921 scope.go:117] "RemoveContainer" containerID="22f148e50263c88342f743bb6ac79442e9dab129600f0958944288807dae9aa4" Dec 10 13:17:59 crc kubenswrapper[4921]: E1210 13:17:59.269307 4921 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"22f148e50263c88342f743bb6ac79442e9dab129600f0958944288807dae9aa4\": container with ID starting with 22f148e50263c88342f743bb6ac79442e9dab129600f0958944288807dae9aa4 not found: ID does not exist" containerID="22f148e50263c88342f743bb6ac79442e9dab129600f0958944288807dae9aa4" Dec 10 13:17:59 crc kubenswrapper[4921]: I1210 13:17:59.269336 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"22f148e50263c88342f743bb6ac79442e9dab129600f0958944288807dae9aa4"} err="failed to get container status \"22f148e50263c88342f743bb6ac79442e9dab129600f0958944288807dae9aa4\": rpc error: code = NotFound desc = could not find container \"22f148e50263c88342f743bb6ac79442e9dab129600f0958944288807dae9aa4\": container with ID starting with 22f148e50263c88342f743bb6ac79442e9dab129600f0958944288807dae9aa4 not found: ID 
does not exist" Dec 10 13:17:59 crc kubenswrapper[4921]: W1210 13:17:59.505361 4921 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod16952130_d663_4f2f_bb74_724fac2ec6dd.slice/crio-884c08a314a3241c4496023f905e0c848f113f7fb31a6f2a07bcc1c3651afccd WatchSource:0}: Error finding container 884c08a314a3241c4496023f905e0c848f113f7fb31a6f2a07bcc1c3651afccd: Status 404 returned error can't find the container with id 884c08a314a3241c4496023f905e0c848f113f7fb31a6f2a07bcc1c3651afccd Dec 10 13:17:59 crc kubenswrapper[4921]: I1210 13:17:59.510528 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-667ff9c869-rtdkp"] Dec 10 13:18:00 crc kubenswrapper[4921]: I1210 13:18:00.196382 4921 generic.go:334] "Generic (PLEG): container finished" podID="16952130-d663-4f2f-bb74-724fac2ec6dd" containerID="0c6b91aa1375d6962eca1f495eb4f950ec53fb346939df844858527a5955b66a" exitCode=0 Dec 10 13:18:00 crc kubenswrapper[4921]: I1210 13:18:00.196532 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-667ff9c869-rtdkp" event={"ID":"16952130-d663-4f2f-bb74-724fac2ec6dd","Type":"ContainerDied","Data":"0c6b91aa1375d6962eca1f495eb4f950ec53fb346939df844858527a5955b66a"} Dec 10 13:18:00 crc kubenswrapper[4921]: I1210 13:18:00.196781 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-667ff9c869-rtdkp" event={"ID":"16952130-d663-4f2f-bb74-724fac2ec6dd","Type":"ContainerStarted","Data":"884c08a314a3241c4496023f905e0c848f113f7fb31a6f2a07bcc1c3651afccd"} Dec 10 13:18:01 crc kubenswrapper[4921]: I1210 13:18:01.215957 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6af75187-fe5e-4c40-9ae4-ab5c70913328" path="/var/lib/kubelet/pods/6af75187-fe5e-4c40-9ae4-ab5c70913328/volumes" Dec 10 13:18:01 crc kubenswrapper[4921]: I1210 13:18:01.217386 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-667ff9c869-rtdkp" event={"ID":"16952130-d663-4f2f-bb74-724fac2ec6dd","Type":"ContainerStarted","Data":"912fd7784305bc65bc25ed1bc23f263dc50f293db25ba7c4602edcc8e10eeb9d"} Dec 10 13:18:01 crc kubenswrapper[4921]: I1210 13:18:01.217494 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-667ff9c869-rtdkp" Dec 10 13:18:01 crc kubenswrapper[4921]: I1210 13:18:01.235623 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-667ff9c869-rtdkp" podStartSLOduration=3.235606225 podStartE2EDuration="3.235606225s" podCreationTimestamp="2025-12-10 13:17:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 13:18:01.22906604 +0000 UTC m=+1278.445287994" watchObservedRunningTime="2025-12-10 13:18:01.235606225 +0000 UTC m=+1278.451828149" Dec 10 13:18:03 crc kubenswrapper[4921]: I1210 13:18:03.770111 4921 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-68d4b6d797-5tzmd" podUID="6af75187-fe5e-4c40-9ae4-ab5c70913328" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.182:5353: i/o timeout" Dec 10 13:18:08 crc kubenswrapper[4921]: I1210 13:18:08.998246 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-667ff9c869-rtdkp" Dec 10 13:18:09 crc kubenswrapper[4921]: I1210 13:18:09.059057 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/dnsmasq-dns-578b8d767c-nr89k"] Dec 10 13:18:09 crc kubenswrapper[4921]: I1210 13:18:09.059338 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-578b8d767c-nr89k" podUID="1a1ec8c2-c5a2-4bae-8951-95348df9974d" containerName="dnsmasq-dns" containerID="cri-o://d38a8fd1642182cb9d10a65742595caf46364fe02ef2252808b0e42de1dc3218" gracePeriod=10 Dec 10 13:18:09 crc kubenswrapper[4921]: I1210 13:18:09.280637 4921 generic.go:334] "Generic (PLEG): container finished" podID="1a1ec8c2-c5a2-4bae-8951-95348df9974d" containerID="d38a8fd1642182cb9d10a65742595caf46364fe02ef2252808b0e42de1dc3218" exitCode=0 Dec 10 13:18:09 crc kubenswrapper[4921]: I1210 13:18:09.280735 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-578b8d767c-nr89k" event={"ID":"1a1ec8c2-c5a2-4bae-8951-95348df9974d","Type":"ContainerDied","Data":"d38a8fd1642182cb9d10a65742595caf46364fe02ef2252808b0e42de1dc3218"} Dec 10 13:18:09 crc kubenswrapper[4921]: I1210 13:18:09.545541 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-578b8d767c-nr89k" Dec 10 13:18:09 crc kubenswrapper[4921]: I1210 13:18:09.621809 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1a1ec8c2-c5a2-4bae-8951-95348df9974d-ovsdbserver-nb\") pod \"1a1ec8c2-c5a2-4bae-8951-95348df9974d\" (UID: \"1a1ec8c2-c5a2-4bae-8951-95348df9974d\") " Dec 10 13:18:09 crc kubenswrapper[4921]: I1210 13:18:09.621879 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/1a1ec8c2-c5a2-4bae-8951-95348df9974d-openstack-edpm-ipam\") pod \"1a1ec8c2-c5a2-4bae-8951-95348df9974d\" (UID: \"1a1ec8c2-c5a2-4bae-8951-95348df9974d\") " Dec 10 13:18:09 crc kubenswrapper[4921]: I1210 13:18:09.621958 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1a1ec8c2-c5a2-4bae-8951-95348df9974d-dns-svc\") pod \"1a1ec8c2-c5a2-4bae-8951-95348df9974d\" (UID: \"1a1ec8c2-c5a2-4bae-8951-95348df9974d\") " Dec 10 13:18:09 crc kubenswrapper[4921]: I1210 13:18:09.622016 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1a1ec8c2-c5a2-4bae-8951-95348df9974d-ovsdbserver-sb\") pod \"1a1ec8c2-c5a2-4bae-8951-95348df9974d\" (UID: \"1a1ec8c2-c5a2-4bae-8951-95348df9974d\") " Dec 10 13:18:09 crc kubenswrapper[4921]: I1210 13:18:09.622584 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a1ec8c2-c5a2-4bae-8951-95348df9974d-config\") pod \"1a1ec8c2-c5a2-4bae-8951-95348df9974d\" (UID: \"1a1ec8c2-c5a2-4bae-8951-95348df9974d\") " Dec 10 13:18:09 crc kubenswrapper[4921]: I1210 13:18:09.622664 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wp55v\" (UniqueName: \"kubernetes.io/projected/1a1ec8c2-c5a2-4bae-8951-95348df9974d-kube-api-access-wp55v\") pod \"1a1ec8c2-c5a2-4bae-8951-95348df9974d\" (UID: \"1a1ec8c2-c5a2-4bae-8951-95348df9974d\") " Dec 10 13:18:09 crc kubenswrapper[4921]: I1210 13:18:09.629783 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1a1ec8c2-c5a2-4bae-8951-95348df9974d-kube-api-access-wp55v" (OuterVolumeSpecName: "kube-api-access-wp55v") 
pod "1a1ec8c2-c5a2-4bae-8951-95348df9974d" (UID: "1a1ec8c2-c5a2-4bae-8951-95348df9974d"). InnerVolumeSpecName "kube-api-access-wp55v". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 13:18:09 crc kubenswrapper[4921]: I1210 13:18:09.689605 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1a1ec8c2-c5a2-4bae-8951-95348df9974d-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "1a1ec8c2-c5a2-4bae-8951-95348df9974d" (UID: "1a1ec8c2-c5a2-4bae-8951-95348df9974d"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 13:18:09 crc kubenswrapper[4921]: I1210 13:18:09.699280 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1a1ec8c2-c5a2-4bae-8951-95348df9974d-openstack-edpm-ipam" (OuterVolumeSpecName: "openstack-edpm-ipam") pod "1a1ec8c2-c5a2-4bae-8951-95348df9974d" (UID: "1a1ec8c2-c5a2-4bae-8951-95348df9974d"). InnerVolumeSpecName "openstack-edpm-ipam". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 13:18:09 crc kubenswrapper[4921]: I1210 13:18:09.705137 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1a1ec8c2-c5a2-4bae-8951-95348df9974d-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "1a1ec8c2-c5a2-4bae-8951-95348df9974d" (UID: "1a1ec8c2-c5a2-4bae-8951-95348df9974d"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 13:18:09 crc kubenswrapper[4921]: I1210 13:18:09.705745 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1a1ec8c2-c5a2-4bae-8951-95348df9974d-config" (OuterVolumeSpecName: "config") pod "1a1ec8c2-c5a2-4bae-8951-95348df9974d" (UID: "1a1ec8c2-c5a2-4bae-8951-95348df9974d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 13:18:09 crc kubenswrapper[4921]: I1210 13:18:09.721191 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1a1ec8c2-c5a2-4bae-8951-95348df9974d-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "1a1ec8c2-c5a2-4bae-8951-95348df9974d" (UID: "1a1ec8c2-c5a2-4bae-8951-95348df9974d"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 13:18:09 crc kubenswrapper[4921]: I1210 13:18:09.729189 4921 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1a1ec8c2-c5a2-4bae-8951-95348df9974d-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 10 13:18:09 crc kubenswrapper[4921]: I1210 13:18:09.729228 4921 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/1a1ec8c2-c5a2-4bae-8951-95348df9974d-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Dec 10 13:18:09 crc kubenswrapper[4921]: I1210 13:18:09.729242 4921 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1a1ec8c2-c5a2-4bae-8951-95348df9974d-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 10 13:18:09 crc kubenswrapper[4921]: I1210 13:18:09.729251 4921 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1a1ec8c2-c5a2-4bae-8951-95348df9974d-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 10 13:18:09 crc kubenswrapper[4921]: I1210 13:18:09.729262 4921 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a1ec8c2-c5a2-4bae-8951-95348df9974d-config\") on node \"crc\" DevicePath \"\"" Dec 10 13:18:09 crc kubenswrapper[4921]: I1210 13:18:09.729270 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wp55v\" (UniqueName: \"kubernetes.io/projected/1a1ec8c2-c5a2-4bae-8951-95348df9974d-kube-api-access-wp55v\") on node \"crc\" DevicePath \"\"" Dec 10 13:18:10 crc kubenswrapper[4921]: I1210 13:18:10.291208 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-578b8d767c-nr89k" event={"ID":"1a1ec8c2-c5a2-4bae-8951-95348df9974d","Type":"ContainerDied","Data":"3a4877226b3636eb4d23064f5be8a71033c6779b637c99a95357db8f79794538"} Dec 10 13:18:10 crc kubenswrapper[4921]: I1210 13:18:10.291288 4921 scope.go:117] "RemoveContainer" containerID="d38a8fd1642182cb9d10a65742595caf46364fe02ef2252808b0e42de1dc3218" Dec 10 13:18:10 crc kubenswrapper[4921]: I1210 13:18:10.291301 4921 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-578b8d767c-nr89k" Dec 10 13:18:10 crc kubenswrapper[4921]: I1210 13:18:10.316604 4921 scope.go:117] "RemoveContainer" containerID="c7df3f02d9727fdeb3675cbc5f631ddf67c5694234033e741ee9d01ad1e5d0b2" Dec 10 13:18:10 crc kubenswrapper[4921]: I1210 13:18:10.324148 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-578b8d767c-nr89k"] Dec 10 13:18:10 crc kubenswrapper[4921]: I1210 13:18:10.332957 4921 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-578b8d767c-nr89k"] Dec 10 13:18:11 crc kubenswrapper[4921]: I1210 13:18:11.201950 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1a1ec8c2-c5a2-4bae-8951-95348df9974d" path="/var/lib/kubelet/pods/1a1ec8c2-c5a2-4bae-8951-95348df9974d/volumes" Dec 10 13:18:16 crc kubenswrapper[4921]: I1210 13:18:16.371918 4921 generic.go:334] "Generic (PLEG): container finished" podID="763f1047-618c-4167-9d5d-27d387e8adf4" containerID="f6778fd93feffe10e6d77fb585dfab81721c69ee53b3dcdc24eae2b5d40c0e17" exitCode=0 Dec 10 13:18:16 crc kubenswrapper[4921]: I1210 13:18:16.372109 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"763f1047-618c-4167-9d5d-27d387e8adf4","Type":"ContainerDied","Data":"f6778fd93feffe10e6d77fb585dfab81721c69ee53b3dcdc24eae2b5d40c0e17"} Dec 10 13:18:16 crc kubenswrapper[4921]: I1210 13:18:16.375222 4921 generic.go:334] "Generic (PLEG): container finished" podID="695d1b33-f9c1-44bb-b41d-9f6de71c3527" containerID="4d35cdb45e23249663f7e787ff6f50edbf0946a5230a41a4527a8b8df37d69f7" exitCode=0 Dec 10 13:18:16 crc kubenswrapper[4921]: I1210 13:18:16.375255 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"695d1b33-f9c1-44bb-b41d-9f6de71c3527","Type":"ContainerDied","Data":"4d35cdb45e23249663f7e787ff6f50edbf0946a5230a41a4527a8b8df37d69f7"} Dec 10 13:18:16 crc kubenswrapper[4921]: I1210 13:18:16.710701 4921 patch_prober.go:28] interesting pod/machine-config-daemon-vn2n6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 10 13:18:16 crc kubenswrapper[4921]: I1210 13:18:16.711298 4921 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" podUID="354355f7-6630-49a8-bdc5-5e875feecb7f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 10 13:18:17 crc kubenswrapper[4921]: I1210 13:18:17.385794 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"695d1b33-f9c1-44bb-b41d-9f6de71c3527","Type":"ContainerStarted","Data":"966bd58a3e3eb742a0a665e37d3fbfcdae7322be73d5fd4d887d4b5aae07e78c"} Dec 10 13:18:17 crc kubenswrapper[4921]: I1210 13:18:17.386252 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Dec 10 13:18:17 crc kubenswrapper[4921]: I1210 13:18:17.387950 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"763f1047-618c-4167-9d5d-27d387e8adf4","Type":"ContainerStarted","Data":"ca08981628df344f226faf68a836f83b40c62b7061a8bfccbd74954fc4f896db"} Dec 10 13:18:17 crc kubenswrapper[4921]: I1210 13:18:17.388159 4921 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Dec 10 13:18:17 crc kubenswrapper[4921]: I1210 13:18:17.413540 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=36.413521837 podStartE2EDuration="36.413521837s" podCreationTimestamp="2025-12-10 13:17:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 13:18:17.410203168 +0000 UTC m=+1294.626425102" watchObservedRunningTime="2025-12-10 13:18:17.413521837 +0000 UTC m=+1294.629743771" Dec 10 13:18:17 crc kubenswrapper[4921]: I1210 13:18:17.441555 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=36.441535108 podStartE2EDuration="36.441535108s" podCreationTimestamp="2025-12-10 13:17:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 13:18:17.436647907 +0000 UTC m=+1294.652869841" watchObservedRunningTime="2025-12-10 13:18:17.441535108 +0000 UTC m=+1294.657757052" Dec 10 13:18:19 crc kubenswrapper[4921]: I1210 13:18:19.224760 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wr5hl"] Dec 10 13:18:19 crc kubenswrapper[4921]: E1210 13:18:19.225371 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6af75187-fe5e-4c40-9ae4-ab5c70913328" containerName="dnsmasq-dns" Dec 10 13:18:19 crc kubenswrapper[4921]: I1210 13:18:19.225383 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="6af75187-fe5e-4c40-9ae4-ab5c70913328" containerName="dnsmasq-dns" Dec 10 13:18:19 crc kubenswrapper[4921]: E1210 13:18:19.225466 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6af75187-fe5e-4c40-9ae4-ab5c70913328" containerName="init" Dec 10 13:18:19 crc kubenswrapper[4921]: I1210 13:18:19.225472 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="6af75187-fe5e-4c40-9ae4-ab5c70913328" containerName="init" Dec 10 13:18:19 crc kubenswrapper[4921]: E1210 13:18:19.225487 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1a1ec8c2-c5a2-4bae-8951-95348df9974d" containerName="init" Dec 10 13:18:19 crc kubenswrapper[4921]: I1210 13:18:19.225493 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="1a1ec8c2-c5a2-4bae-8951-95348df9974d" containerName="init" Dec 10 13:18:19 crc kubenswrapper[4921]: E1210 13:18:19.225504 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1a1ec8c2-c5a2-4bae-8951-95348df9974d" containerName="dnsmasq-dns" Dec 10 13:18:19 crc kubenswrapper[4921]: I1210 13:18:19.225510 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="1a1ec8c2-c5a2-4bae-8951-95348df9974d" containerName="dnsmasq-dns" Dec 10 13:18:19 crc kubenswrapper[4921]: I1210 13:18:19.225669 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="6af75187-fe5e-4c40-9ae4-ab5c70913328" containerName="dnsmasq-dns" Dec 10 13:18:19 crc kubenswrapper[4921]: I1210 13:18:19.225691 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="1a1ec8c2-c5a2-4bae-8951-95348df9974d" containerName="dnsmasq-dns" Dec 10 13:18:19 crc kubenswrapper[4921]: I1210 13:18:19.226254 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wr5hl" Dec 10 13:18:19 crc kubenswrapper[4921]: I1210 13:18:19.228489 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 10 13:18:19 crc kubenswrapper[4921]: I1210 13:18:19.228769 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-tgn2z" Dec 10 13:18:19 crc kubenswrapper[4921]: I1210 13:18:19.230410 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 10 13:18:19 crc kubenswrapper[4921]: I1210 13:18:19.234340 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wr5hl"] Dec 10 13:18:19 crc kubenswrapper[4921]: I1210 13:18:19.242640 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 10 13:18:19 crc kubenswrapper[4921]: I1210 13:18:19.392497 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5bb2996f-5e63-42f1-a2a7-757a7fbcff35-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-wr5hl\" (UID: \"5bb2996f-5e63-42f1-a2a7-757a7fbcff35\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wr5hl" Dec 10 13:18:19 crc kubenswrapper[4921]: I1210 13:18:19.392718 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-svhfq\" (UniqueName: \"kubernetes.io/projected/5bb2996f-5e63-42f1-a2a7-757a7fbcff35-kube-api-access-svhfq\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-wr5hl\" (UID: \"5bb2996f-5e63-42f1-a2a7-757a7fbcff35\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wr5hl" Dec 10 13:18:19 crc kubenswrapper[4921]: I1210 13:18:19.393070 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5bb2996f-5e63-42f1-a2a7-757a7fbcff35-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-wr5hl\" (UID: \"5bb2996f-5e63-42f1-a2a7-757a7fbcff35\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wr5hl" Dec 10 13:18:19 crc kubenswrapper[4921]: I1210 13:18:19.393120 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5bb2996f-5e63-42f1-a2a7-757a7fbcff35-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-wr5hl\" (UID: \"5bb2996f-5e63-42f1-a2a7-757a7fbcff35\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wr5hl" Dec 10 13:18:19 crc kubenswrapper[4921]: I1210 13:18:19.494541 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5bb2996f-5e63-42f1-a2a7-757a7fbcff35-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-wr5hl\" (UID: \"5bb2996f-5e63-42f1-a2a7-757a7fbcff35\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wr5hl" Dec 10 13:18:19 crc kubenswrapper[4921]: I1210 13:18:19.494590 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5bb2996f-5e63-42f1-a2a7-757a7fbcff35-ssh-key\") pod 
\"repo-setup-edpm-deployment-openstack-edpm-ipam-wr5hl\" (UID: \"5bb2996f-5e63-42f1-a2a7-757a7fbcff35\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wr5hl" Dec 10 13:18:19 crc kubenswrapper[4921]: I1210 13:18:19.494668 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5bb2996f-5e63-42f1-a2a7-757a7fbcff35-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-wr5hl\" (UID: \"5bb2996f-5e63-42f1-a2a7-757a7fbcff35\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wr5hl" Dec 10 13:18:19 crc kubenswrapper[4921]: I1210 13:18:19.494717 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-svhfq\" (UniqueName: \"kubernetes.io/projected/5bb2996f-5e63-42f1-a2a7-757a7fbcff35-kube-api-access-svhfq\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-wr5hl\" (UID: \"5bb2996f-5e63-42f1-a2a7-757a7fbcff35\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wr5hl" Dec 10 13:18:19 crc kubenswrapper[4921]: I1210 13:18:19.499789 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5bb2996f-5e63-42f1-a2a7-757a7fbcff35-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-wr5hl\" (UID: \"5bb2996f-5e63-42f1-a2a7-757a7fbcff35\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wr5hl" Dec 10 13:18:19 crc kubenswrapper[4921]: I1210 13:18:19.500080 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5bb2996f-5e63-42f1-a2a7-757a7fbcff35-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-wr5hl\" (UID: \"5bb2996f-5e63-42f1-a2a7-757a7fbcff35\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wr5hl" Dec 10 13:18:19 crc kubenswrapper[4921]: I1210 13:18:19.510765 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5bb2996f-5e63-42f1-a2a7-757a7fbcff35-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-wr5hl\" (UID: \"5bb2996f-5e63-42f1-a2a7-757a7fbcff35\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wr5hl" Dec 10 13:18:19 crc kubenswrapper[4921]: I1210 13:18:19.516793 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-svhfq\" (UniqueName: \"kubernetes.io/projected/5bb2996f-5e63-42f1-a2a7-757a7fbcff35-kube-api-access-svhfq\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-wr5hl\" (UID: \"5bb2996f-5e63-42f1-a2a7-757a7fbcff35\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wr5hl" Dec 10 13:18:19 crc kubenswrapper[4921]: I1210 13:18:19.543753 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wr5hl" Dec 10 13:18:20 crc kubenswrapper[4921]: I1210 13:18:20.120039 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wr5hl"] Dec 10 13:18:20 crc kubenswrapper[4921]: I1210 13:18:20.413748 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wr5hl" event={"ID":"5bb2996f-5e63-42f1-a2a7-757a7fbcff35","Type":"ContainerStarted","Data":"89032fb2708cd38edd1317882fb0d1c2b0d4997a443647327c182eb3d9ed2779"} Dec 10 13:18:31 crc kubenswrapper[4921]: I1210 13:18:31.634256 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Dec 10 13:18:31 crc kubenswrapper[4921]: I1210 13:18:31.710760 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Dec 10 13:18:32 crc kubenswrapper[4921]: I1210 13:18:32.538583 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wr5hl" event={"ID":"5bb2996f-5e63-42f1-a2a7-757a7fbcff35","Type":"ContainerStarted","Data":"614ccc9aef922a21076999161608644bba5e9c831f82731b6fc1e77041eaddf3"} Dec 10 13:18:46 crc kubenswrapper[4921]: I1210 13:18:46.711402 4921 patch_prober.go:28] interesting pod/machine-config-daemon-vn2n6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 10 13:18:46 crc kubenswrapper[4921]: I1210 13:18:46.711864 4921 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" podUID="354355f7-6630-49a8-bdc5-5e875feecb7f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 10 13:18:48 crc kubenswrapper[4921]: I1210 13:18:48.659640 4921 generic.go:334] "Generic (PLEG): container finished" podID="5bb2996f-5e63-42f1-a2a7-757a7fbcff35" containerID="614ccc9aef922a21076999161608644bba5e9c831f82731b6fc1e77041eaddf3" exitCode=0 Dec 10 13:18:48 crc kubenswrapper[4921]: I1210 13:18:48.659714 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wr5hl" event={"ID":"5bb2996f-5e63-42f1-a2a7-757a7fbcff35","Type":"ContainerDied","Data":"614ccc9aef922a21076999161608644bba5e9c831f82731b6fc1e77041eaddf3"} Dec 10 13:18:50 crc kubenswrapper[4921]: I1210 13:18:50.119458 4921 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wr5hl" Dec 10 13:18:50 crc kubenswrapper[4921]: I1210 13:18:50.300548 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5bb2996f-5e63-42f1-a2a7-757a7fbcff35-inventory\") pod \"5bb2996f-5e63-42f1-a2a7-757a7fbcff35\" (UID: \"5bb2996f-5e63-42f1-a2a7-757a7fbcff35\") " Dec 10 13:18:50 crc kubenswrapper[4921]: I1210 13:18:50.300655 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-svhfq\" (UniqueName: \"kubernetes.io/projected/5bb2996f-5e63-42f1-a2a7-757a7fbcff35-kube-api-access-svhfq\") pod \"5bb2996f-5e63-42f1-a2a7-757a7fbcff35\" (UID: \"5bb2996f-5e63-42f1-a2a7-757a7fbcff35\") " Dec 10 13:18:50 crc kubenswrapper[4921]: I1210 13:18:50.300784 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5bb2996f-5e63-42f1-a2a7-757a7fbcff35-repo-setup-combined-ca-bundle\") pod \"5bb2996f-5e63-42f1-a2a7-757a7fbcff35\" (UID: \"5bb2996f-5e63-42f1-a2a7-757a7fbcff35\") " Dec 10 13:18:50 crc kubenswrapper[4921]: I1210 13:18:50.300820 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5bb2996f-5e63-42f1-a2a7-757a7fbcff35-ssh-key\") pod \"5bb2996f-5e63-42f1-a2a7-757a7fbcff35\" (UID: \"5bb2996f-5e63-42f1-a2a7-757a7fbcff35\") " Dec 10 13:18:50 crc kubenswrapper[4921]: I1210 13:18:50.309184 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5bb2996f-5e63-42f1-a2a7-757a7fbcff35-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "5bb2996f-5e63-42f1-a2a7-757a7fbcff35" (UID: "5bb2996f-5e63-42f1-a2a7-757a7fbcff35"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:18:50 crc kubenswrapper[4921]: I1210 13:18:50.309877 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5bb2996f-5e63-42f1-a2a7-757a7fbcff35-kube-api-access-svhfq" (OuterVolumeSpecName: "kube-api-access-svhfq") pod "5bb2996f-5e63-42f1-a2a7-757a7fbcff35" (UID: "5bb2996f-5e63-42f1-a2a7-757a7fbcff35"). InnerVolumeSpecName "kube-api-access-svhfq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 13:18:50 crc kubenswrapper[4921]: I1210 13:18:50.327641 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5bb2996f-5e63-42f1-a2a7-757a7fbcff35-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "5bb2996f-5e63-42f1-a2a7-757a7fbcff35" (UID: "5bb2996f-5e63-42f1-a2a7-757a7fbcff35"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:18:50 crc kubenswrapper[4921]: I1210 13:18:50.339995 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5bb2996f-5e63-42f1-a2a7-757a7fbcff35-inventory" (OuterVolumeSpecName: "inventory") pod "5bb2996f-5e63-42f1-a2a7-757a7fbcff35" (UID: "5bb2996f-5e63-42f1-a2a7-757a7fbcff35"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:18:50 crc kubenswrapper[4921]: I1210 13:18:50.403125 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-svhfq\" (UniqueName: \"kubernetes.io/projected/5bb2996f-5e63-42f1-a2a7-757a7fbcff35-kube-api-access-svhfq\") on node \"crc\" DevicePath \"\"" Dec 10 13:18:50 crc kubenswrapper[4921]: I1210 13:18:50.403165 4921 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5bb2996f-5e63-42f1-a2a7-757a7fbcff35-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 13:18:50 crc kubenswrapper[4921]: I1210 13:18:50.403179 4921 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5bb2996f-5e63-42f1-a2a7-757a7fbcff35-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 10 13:18:50 crc kubenswrapper[4921]: I1210 13:18:50.403192 4921 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5bb2996f-5e63-42f1-a2a7-757a7fbcff35-inventory\") on node \"crc\" DevicePath \"\"" Dec 10 13:18:50 crc kubenswrapper[4921]: I1210 13:18:50.680764 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wr5hl" event={"ID":"5bb2996f-5e63-42f1-a2a7-757a7fbcff35","Type":"ContainerDied","Data":"89032fb2708cd38edd1317882fb0d1c2b0d4997a443647327c182eb3d9ed2779"} Dec 10 13:18:50 crc kubenswrapper[4921]: I1210 13:18:50.680830 4921 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="89032fb2708cd38edd1317882fb0d1c2b0d4997a443647327c182eb3d9ed2779" Dec 10 13:18:50 crc kubenswrapper[4921]: I1210 13:18:50.680888 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wr5hl" Dec 10 13:18:50 crc kubenswrapper[4921]: I1210 13:18:50.776376 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-g4sdz"] Dec 10 13:18:50 crc kubenswrapper[4921]: E1210 13:18:50.777080 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5bb2996f-5e63-42f1-a2a7-757a7fbcff35" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 10 13:18:50 crc kubenswrapper[4921]: I1210 13:18:50.777161 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="5bb2996f-5e63-42f1-a2a7-757a7fbcff35" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 10 13:18:50 crc kubenswrapper[4921]: I1210 13:18:50.777384 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="5bb2996f-5e63-42f1-a2a7-757a7fbcff35" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 10 13:18:50 crc kubenswrapper[4921]: I1210 13:18:50.778439 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-g4sdz" Dec 10 13:18:50 crc kubenswrapper[4921]: I1210 13:18:50.784582 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 10 13:18:50 crc kubenswrapper[4921]: I1210 13:18:50.785032 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 10 13:18:50 crc kubenswrapper[4921]: I1210 13:18:50.785068 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 10 13:18:50 crc kubenswrapper[4921]: I1210 13:18:50.786895 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-g4sdz"] Dec 10 13:18:50 crc kubenswrapper[4921]: I1210 13:18:50.788082 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-tgn2z" Dec 10 13:18:50 crc kubenswrapper[4921]: I1210 13:18:50.911446 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tfgbn\" (UniqueName: \"kubernetes.io/projected/c9c17643-da1b-4b05-acc8-08b43f910a68-kube-api-access-tfgbn\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-g4sdz\" (UID: \"c9c17643-da1b-4b05-acc8-08b43f910a68\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-g4sdz" Dec 10 13:18:50 crc kubenswrapper[4921]: I1210 13:18:50.911529 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c9c17643-da1b-4b05-acc8-08b43f910a68-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-g4sdz\" (UID: \"c9c17643-da1b-4b05-acc8-08b43f910a68\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-g4sdz" Dec 10 13:18:50 crc kubenswrapper[4921]: I1210 13:18:50.911567 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9c17643-da1b-4b05-acc8-08b43f910a68-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-g4sdz\" (UID: \"c9c17643-da1b-4b05-acc8-08b43f910a68\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-g4sdz" Dec 10 13:18:50 crc kubenswrapper[4921]: I1210 13:18:50.911599 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c9c17643-da1b-4b05-acc8-08b43f910a68-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-g4sdz\" (UID: \"c9c17643-da1b-4b05-acc8-08b43f910a68\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-g4sdz" Dec 10 13:18:51 crc kubenswrapper[4921]: I1210 13:18:51.013302 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c9c17643-da1b-4b05-acc8-08b43f910a68-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-g4sdz\" (UID: \"c9c17643-da1b-4b05-acc8-08b43f910a68\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-g4sdz" Dec 10 13:18:51 crc kubenswrapper[4921]: I1210 13:18:51.013365 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9c17643-da1b-4b05-acc8-08b43f910a68-bootstrap-combined-ca-bundle\") pod 
\"bootstrap-edpm-deployment-openstack-edpm-ipam-g4sdz\" (UID: \"c9c17643-da1b-4b05-acc8-08b43f910a68\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-g4sdz" Dec 10 13:18:51 crc kubenswrapper[4921]: I1210 13:18:51.013408 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c9c17643-da1b-4b05-acc8-08b43f910a68-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-g4sdz\" (UID: \"c9c17643-da1b-4b05-acc8-08b43f910a68\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-g4sdz" Dec 10 13:18:51 crc kubenswrapper[4921]: I1210 13:18:51.013521 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tfgbn\" (UniqueName: \"kubernetes.io/projected/c9c17643-da1b-4b05-acc8-08b43f910a68-kube-api-access-tfgbn\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-g4sdz\" (UID: \"c9c17643-da1b-4b05-acc8-08b43f910a68\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-g4sdz" Dec 10 13:18:51 crc kubenswrapper[4921]: I1210 13:18:51.018097 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c9c17643-da1b-4b05-acc8-08b43f910a68-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-g4sdz\" (UID: \"c9c17643-da1b-4b05-acc8-08b43f910a68\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-g4sdz" Dec 10 13:18:51 crc kubenswrapper[4921]: I1210 13:18:51.018293 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9c17643-da1b-4b05-acc8-08b43f910a68-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-g4sdz\" (UID: \"c9c17643-da1b-4b05-acc8-08b43f910a68\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-g4sdz" Dec 10 13:18:51 crc kubenswrapper[4921]: I1210 13:18:51.024897 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c9c17643-da1b-4b05-acc8-08b43f910a68-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-g4sdz\" (UID: \"c9c17643-da1b-4b05-acc8-08b43f910a68\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-g4sdz" Dec 10 13:18:51 crc kubenswrapper[4921]: I1210 13:18:51.035063 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tfgbn\" (UniqueName: \"kubernetes.io/projected/c9c17643-da1b-4b05-acc8-08b43f910a68-kube-api-access-tfgbn\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-g4sdz\" (UID: \"c9c17643-da1b-4b05-acc8-08b43f910a68\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-g4sdz" Dec 10 13:18:51 crc kubenswrapper[4921]: I1210 13:18:51.092859 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-g4sdz" Dec 10 13:18:51 crc kubenswrapper[4921]: I1210 13:18:51.606631 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-g4sdz"] Dec 10 13:18:51 crc kubenswrapper[4921]: I1210 13:18:51.689097 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-g4sdz" event={"ID":"c9c17643-da1b-4b05-acc8-08b43f910a68","Type":"ContainerStarted","Data":"1b79a44385b5e2823f341b482bbc4cb31291f8abdb8c4c4b1ffe226d9ce8584b"} Dec 10 13:18:53 crc kubenswrapper[4921]: I1210 13:18:53.705475 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-g4sdz" event={"ID":"c9c17643-da1b-4b05-acc8-08b43f910a68","Type":"ContainerStarted","Data":"0e254652b21904e358d80afcc4605da1edbdb3f7c94e9ac660a9db0383ace220"} Dec 10 13:18:53 crc kubenswrapper[4921]: I1210 13:18:53.723599 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-g4sdz" podStartSLOduration=2.6724211799999997 podStartE2EDuration="3.723576391s" podCreationTimestamp="2025-12-10 13:18:50 +0000 UTC" firstStartedPulling="2025-12-10 13:18:51.606271707 +0000 UTC m=+1328.822493631" lastFinishedPulling="2025-12-10 13:18:52.657426918 +0000 UTC m=+1329.873648842" observedRunningTime="2025-12-10 13:18:53.719672436 +0000 UTC m=+1330.935894380" watchObservedRunningTime="2025-12-10 13:18:53.723576391 +0000 UTC m=+1330.939798325" Dec 10 13:19:16 crc kubenswrapper[4921]: I1210 13:19:16.710965 4921 patch_prober.go:28] interesting pod/machine-config-daemon-vn2n6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 10 13:19:16 crc kubenswrapper[4921]: I1210 13:19:16.711609 4921 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" podUID="354355f7-6630-49a8-bdc5-5e875feecb7f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 10 13:19:16 crc kubenswrapper[4921]: I1210 13:19:16.711662 4921 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" Dec 10 13:19:16 crc kubenswrapper[4921]: I1210 13:19:16.712234 4921 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"dded596c5a06c9f34bbe49927b6ba36b53f24e08c9e890c58e9f5dbee945fc5f"} pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 10 13:19:16 crc kubenswrapper[4921]: I1210 13:19:16.712280 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" podUID="354355f7-6630-49a8-bdc5-5e875feecb7f" containerName="machine-config-daemon" containerID="cri-o://dded596c5a06c9f34bbe49927b6ba36b53f24e08c9e890c58e9f5dbee945fc5f" gracePeriod=600 Dec 10 13:19:16 crc kubenswrapper[4921]: I1210 13:19:16.892788 4921 generic.go:334] "Generic (PLEG): container finished" 
podID="354355f7-6630-49a8-bdc5-5e875feecb7f" containerID="dded596c5a06c9f34bbe49927b6ba36b53f24e08c9e890c58e9f5dbee945fc5f" exitCode=0 Dec 10 13:19:16 crc kubenswrapper[4921]: I1210 13:19:16.892848 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" event={"ID":"354355f7-6630-49a8-bdc5-5e875feecb7f","Type":"ContainerDied","Data":"dded596c5a06c9f34bbe49927b6ba36b53f24e08c9e890c58e9f5dbee945fc5f"} Dec 10 13:19:16 crc kubenswrapper[4921]: I1210 13:19:16.892883 4921 scope.go:117] "RemoveContainer" containerID="4971420ec666b0633f92ea9dfa8a109dd0d7730fedda43f31a5af62a9d620d9b" Dec 10 13:19:17 crc kubenswrapper[4921]: I1210 13:19:17.905537 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" event={"ID":"354355f7-6630-49a8-bdc5-5e875feecb7f","Type":"ContainerStarted","Data":"0c62b92cda59f27c8c407b11b3375247aad49e7f6c08bd5714c68b8dfabe123c"} Dec 10 13:19:44 crc kubenswrapper[4921]: I1210 13:19:44.408452 4921 scope.go:117] "RemoveContainer" containerID="dc80fc09ebb77d2928c25b3aeb2b8dcb8c4dabe8ca7b77ac7536afb9d50ba60e" Dec 10 13:19:44 crc kubenswrapper[4921]: I1210 13:19:44.439495 4921 scope.go:117] "RemoveContainer" containerID="c55ad0a6c9269f9e508c276faf37d1bc329a34437f659118e593a633b057a28c" Dec 10 13:20:44 crc kubenswrapper[4921]: I1210 13:20:44.517976 4921 scope.go:117] "RemoveContainer" containerID="319ac56f724e00ff9a294ccccc30f08fa720f360b1775e1be4e911ffbaca1aa9" Dec 10 13:20:52 crc kubenswrapper[4921]: I1210 13:20:52.812981 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-t96cb"] Dec 10 13:20:52 crc kubenswrapper[4921]: I1210 13:20:52.815823 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-t96cb" Dec 10 13:20:52 crc kubenswrapper[4921]: I1210 13:20:52.835346 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-t96cb"] Dec 10 13:20:52 crc kubenswrapper[4921]: I1210 13:20:52.987935 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1242d417-c080-41c7-a03b-cf7ac417f342-catalog-content\") pod \"redhat-operators-t96cb\" (UID: \"1242d417-c080-41c7-a03b-cf7ac417f342\") " pod="openshift-marketplace/redhat-operators-t96cb" Dec 10 13:20:52 crc kubenswrapper[4921]: I1210 13:20:52.988003 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1242d417-c080-41c7-a03b-cf7ac417f342-utilities\") pod \"redhat-operators-t96cb\" (UID: \"1242d417-c080-41c7-a03b-cf7ac417f342\") " pod="openshift-marketplace/redhat-operators-t96cb" Dec 10 13:20:52 crc kubenswrapper[4921]: I1210 13:20:52.988058 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nnhjb\" (UniqueName: \"kubernetes.io/projected/1242d417-c080-41c7-a03b-cf7ac417f342-kube-api-access-nnhjb\") pod \"redhat-operators-t96cb\" (UID: \"1242d417-c080-41c7-a03b-cf7ac417f342\") " pod="openshift-marketplace/redhat-operators-t96cb" Dec 10 13:20:53 crc kubenswrapper[4921]: I1210 13:20:53.089369 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1242d417-c080-41c7-a03b-cf7ac417f342-catalog-content\") pod \"redhat-operators-t96cb\" (UID: \"1242d417-c080-41c7-a03b-cf7ac417f342\") " pod="openshift-marketplace/redhat-operators-t96cb" Dec 10 13:20:53 crc kubenswrapper[4921]: I1210 13:20:53.089450 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1242d417-c080-41c7-a03b-cf7ac417f342-utilities\") pod \"redhat-operators-t96cb\" (UID: \"1242d417-c080-41c7-a03b-cf7ac417f342\") " pod="openshift-marketplace/redhat-operators-t96cb" Dec 10 13:20:53 crc kubenswrapper[4921]: I1210 13:20:53.089487 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nnhjb\" (UniqueName: \"kubernetes.io/projected/1242d417-c080-41c7-a03b-cf7ac417f342-kube-api-access-nnhjb\") pod \"redhat-operators-t96cb\" (UID: \"1242d417-c080-41c7-a03b-cf7ac417f342\") " pod="openshift-marketplace/redhat-operators-t96cb" Dec 10 13:20:53 crc kubenswrapper[4921]: I1210 13:20:53.089967 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1242d417-c080-41c7-a03b-cf7ac417f342-catalog-content\") pod \"redhat-operators-t96cb\" (UID: \"1242d417-c080-41c7-a03b-cf7ac417f342\") " pod="openshift-marketplace/redhat-operators-t96cb" Dec 10 13:20:53 crc kubenswrapper[4921]: I1210 13:20:53.089978 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1242d417-c080-41c7-a03b-cf7ac417f342-utilities\") pod \"redhat-operators-t96cb\" (UID: \"1242d417-c080-41c7-a03b-cf7ac417f342\") " pod="openshift-marketplace/redhat-operators-t96cb" Dec 10 13:20:53 crc kubenswrapper[4921]: I1210 13:20:53.111328 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-nnhjb\" (UniqueName: \"kubernetes.io/projected/1242d417-c080-41c7-a03b-cf7ac417f342-kube-api-access-nnhjb\") pod \"redhat-operators-t96cb\" (UID: \"1242d417-c080-41c7-a03b-cf7ac417f342\") " pod="openshift-marketplace/redhat-operators-t96cb" Dec 10 13:20:53 crc kubenswrapper[4921]: I1210 13:20:53.133558 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-t96cb" Dec 10 13:20:53 crc kubenswrapper[4921]: I1210 13:20:53.584125 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-t96cb"] Dec 10 13:20:53 crc kubenswrapper[4921]: I1210 13:20:53.739211 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-t96cb" event={"ID":"1242d417-c080-41c7-a03b-cf7ac417f342","Type":"ContainerStarted","Data":"7e227a5f8c19a697977396ad10282b0fc0f0ef69db0805b013556e16294d14f1"} Dec 10 13:20:54 crc kubenswrapper[4921]: I1210 13:20:54.750014 4921 generic.go:334] "Generic (PLEG): container finished" podID="1242d417-c080-41c7-a03b-cf7ac417f342" containerID="d0614721af3c57efb9c19900b026764315aea7c4ef7aa4558e25406a29ac71b4" exitCode=0 Dec 10 13:20:54 crc kubenswrapper[4921]: I1210 13:20:54.750073 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-t96cb" event={"ID":"1242d417-c080-41c7-a03b-cf7ac417f342","Type":"ContainerDied","Data":"d0614721af3c57efb9c19900b026764315aea7c4ef7aa4558e25406a29ac71b4"} Dec 10 13:20:56 crc kubenswrapper[4921]: I1210 13:20:56.770356 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-t96cb" event={"ID":"1242d417-c080-41c7-a03b-cf7ac417f342","Type":"ContainerStarted","Data":"3c56ad0dd82d5289c2650ac943edaf82e50f8a2f9712363617fa2999bb47d1dc"} Dec 10 13:20:59 crc kubenswrapper[4921]: I1210 13:20:59.797905 4921 generic.go:334] "Generic (PLEG): container finished" podID="1242d417-c080-41c7-a03b-cf7ac417f342" containerID="3c56ad0dd82d5289c2650ac943edaf82e50f8a2f9712363617fa2999bb47d1dc" exitCode=0 Dec 10 13:20:59 crc kubenswrapper[4921]: I1210 13:20:59.798008 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-t96cb" event={"ID":"1242d417-c080-41c7-a03b-cf7ac417f342","Type":"ContainerDied","Data":"3c56ad0dd82d5289c2650ac943edaf82e50f8a2f9712363617fa2999bb47d1dc"} Dec 10 13:21:00 crc kubenswrapper[4921]: I1210 13:21:00.807874 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-t96cb" event={"ID":"1242d417-c080-41c7-a03b-cf7ac417f342","Type":"ContainerStarted","Data":"ce151ed3740491b812974a2b3bad963574d863874aaedc45a959c7eb7f9ae4c5"} Dec 10 13:21:00 crc kubenswrapper[4921]: I1210 13:21:00.831611 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-t96cb" podStartSLOduration=3.241049706 podStartE2EDuration="8.831593371s" podCreationTimestamp="2025-12-10 13:20:52 +0000 UTC" firstStartedPulling="2025-12-10 13:20:54.754649212 +0000 UTC m=+1451.970871136" lastFinishedPulling="2025-12-10 13:21:00.345192857 +0000 UTC m=+1457.561414801" observedRunningTime="2025-12-10 13:21:00.825172348 +0000 UTC m=+1458.041394272" watchObservedRunningTime="2025-12-10 13:21:00.831593371 +0000 UTC m=+1458.047815295" Dec 10 13:21:03 crc kubenswrapper[4921]: I1210 13:21:03.133947 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-t96cb" 
Dec 10 13:21:03 crc kubenswrapper[4921]: I1210 13:21:03.134490 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-t96cb"
Dec 10 13:21:04 crc kubenswrapper[4921]: I1210 13:21:04.191586 4921 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-t96cb" podUID="1242d417-c080-41c7-a03b-cf7ac417f342" containerName="registry-server" probeResult="failure" output=<
Dec 10 13:21:04 crc kubenswrapper[4921]: timeout: failed to connect service ":50051" within 1s
Dec 10 13:21:04 crc kubenswrapper[4921]: >
Dec 10 13:21:13 crc kubenswrapper[4921]: I1210 13:21:13.183838 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-t96cb"
Dec 10 13:21:13 crc kubenswrapper[4921]: I1210 13:21:13.235477 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-t96cb"
Dec 10 13:21:13 crc kubenswrapper[4921]: I1210 13:21:13.423146 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-t96cb"]
Dec 10 13:21:14 crc kubenswrapper[4921]: I1210 13:21:14.938202 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-t96cb" podUID="1242d417-c080-41c7-a03b-cf7ac417f342" containerName="registry-server" containerID="cri-o://ce151ed3740491b812974a2b3bad963574d863874aaedc45a959c7eb7f9ae4c5" gracePeriod=2
Dec 10 13:21:15 crc kubenswrapper[4921]: I1210 13:21:15.383685 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-t96cb"
Dec 10 13:21:15 crc kubenswrapper[4921]: I1210 13:21:15.500612 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nnhjb\" (UniqueName: \"kubernetes.io/projected/1242d417-c080-41c7-a03b-cf7ac417f342-kube-api-access-nnhjb\") pod \"1242d417-c080-41c7-a03b-cf7ac417f342\" (UID: \"1242d417-c080-41c7-a03b-cf7ac417f342\") "
Dec 10 13:21:15 crc kubenswrapper[4921]: I1210 13:21:15.500727 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1242d417-c080-41c7-a03b-cf7ac417f342-utilities\") pod \"1242d417-c080-41c7-a03b-cf7ac417f342\" (UID: \"1242d417-c080-41c7-a03b-cf7ac417f342\") "
Dec 10 13:21:15 crc kubenswrapper[4921]: I1210 13:21:15.500845 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1242d417-c080-41c7-a03b-cf7ac417f342-catalog-content\") pod \"1242d417-c080-41c7-a03b-cf7ac417f342\" (UID: \"1242d417-c080-41c7-a03b-cf7ac417f342\") "
Dec 10 13:21:15 crc kubenswrapper[4921]: I1210 13:21:15.501312 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1242d417-c080-41c7-a03b-cf7ac417f342-utilities" (OuterVolumeSpecName: "utilities") pod "1242d417-c080-41c7-a03b-cf7ac417f342" (UID: "1242d417-c080-41c7-a03b-cf7ac417f342"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 10 13:21:15 crc kubenswrapper[4921]: I1210 13:21:15.517678 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1242d417-c080-41c7-a03b-cf7ac417f342-kube-api-access-nnhjb" (OuterVolumeSpecName: "kube-api-access-nnhjb") pod "1242d417-c080-41c7-a03b-cf7ac417f342" (UID: "1242d417-c080-41c7-a03b-cf7ac417f342"). InnerVolumeSpecName "kube-api-access-nnhjb". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 10 13:21:15 crc kubenswrapper[4921]: I1210 13:21:15.602381 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nnhjb\" (UniqueName: \"kubernetes.io/projected/1242d417-c080-41c7-a03b-cf7ac417f342-kube-api-access-nnhjb\") on node \"crc\" DevicePath \"\""
Dec 10 13:21:15 crc kubenswrapper[4921]: I1210 13:21:15.602432 4921 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1242d417-c080-41c7-a03b-cf7ac417f342-utilities\") on node \"crc\" DevicePath \"\""
Dec 10 13:21:15 crc kubenswrapper[4921]: I1210 13:21:15.628529 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1242d417-c080-41c7-a03b-cf7ac417f342-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1242d417-c080-41c7-a03b-cf7ac417f342" (UID: "1242d417-c080-41c7-a03b-cf7ac417f342"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 10 13:21:15 crc kubenswrapper[4921]: I1210 13:21:15.704494 4921 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1242d417-c080-41c7-a03b-cf7ac417f342-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 10 13:21:15 crc kubenswrapper[4921]: I1210 13:21:15.951125 4921 generic.go:334] "Generic (PLEG): container finished" podID="1242d417-c080-41c7-a03b-cf7ac417f342" containerID="ce151ed3740491b812974a2b3bad963574d863874aaedc45a959c7eb7f9ae4c5" exitCode=0
Dec 10 13:21:15 crc kubenswrapper[4921]: I1210 13:21:15.951196 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-t96cb" event={"ID":"1242d417-c080-41c7-a03b-cf7ac417f342","Type":"ContainerDied","Data":"ce151ed3740491b812974a2b3bad963574d863874aaedc45a959c7eb7f9ae4c5"}
Dec 10 13:21:15 crc kubenswrapper[4921]: I1210 13:21:15.951246 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-t96cb" event={"ID":"1242d417-c080-41c7-a03b-cf7ac417f342","Type":"ContainerDied","Data":"7e227a5f8c19a697977396ad10282b0fc0f0ef69db0805b013556e16294d14f1"}
Dec 10 13:21:15 crc kubenswrapper[4921]: I1210 13:21:15.951267 4921 scope.go:117] "RemoveContainer" containerID="ce151ed3740491b812974a2b3bad963574d863874aaedc45a959c7eb7f9ae4c5"
Dec 10 13:21:15 crc kubenswrapper[4921]: I1210 13:21:15.951262 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-t96cb"
Dec 10 13:21:15 crc kubenswrapper[4921]: I1210 13:21:15.969458 4921 scope.go:117] "RemoveContainer" containerID="3c56ad0dd82d5289c2650ac943edaf82e50f8a2f9712363617fa2999bb47d1dc"
Dec 10 13:21:15 crc kubenswrapper[4921]: I1210 13:21:15.996962 4921 scope.go:117] "RemoveContainer" containerID="d0614721af3c57efb9c19900b026764315aea7c4ef7aa4558e25406a29ac71b4"
Dec 10 13:21:16 crc kubenswrapper[4921]: I1210 13:21:16.034488 4921 scope.go:117] "RemoveContainer" containerID="ce151ed3740491b812974a2b3bad963574d863874aaedc45a959c7eb7f9ae4c5"
Dec 10 13:21:16 crc kubenswrapper[4921]: E1210 13:21:16.036253 4921 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ce151ed3740491b812974a2b3bad963574d863874aaedc45a959c7eb7f9ae4c5\": container with ID starting with ce151ed3740491b812974a2b3bad963574d863874aaedc45a959c7eb7f9ae4c5 not found: ID does not exist" containerID="ce151ed3740491b812974a2b3bad963574d863874aaedc45a959c7eb7f9ae4c5"
Dec 10 13:21:16 crc kubenswrapper[4921]: I1210 13:21:16.036296 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ce151ed3740491b812974a2b3bad963574d863874aaedc45a959c7eb7f9ae4c5"} err="failed to get container status \"ce151ed3740491b812974a2b3bad963574d863874aaedc45a959c7eb7f9ae4c5\": rpc error: code = NotFound desc = could not find container \"ce151ed3740491b812974a2b3bad963574d863874aaedc45a959c7eb7f9ae4c5\": container with ID starting with ce151ed3740491b812974a2b3bad963574d863874aaedc45a959c7eb7f9ae4c5 not found: ID does not exist"
Dec 10 13:21:16 crc kubenswrapper[4921]: I1210 13:21:16.036314 4921 scope.go:117] "RemoveContainer" containerID="3c56ad0dd82d5289c2650ac943edaf82e50f8a2f9712363617fa2999bb47d1dc"
Dec 10 13:21:16 crc kubenswrapper[4921]: E1210 13:21:16.036597 4921 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3c56ad0dd82d5289c2650ac943edaf82e50f8a2f9712363617fa2999bb47d1dc\": container with ID starting with 3c56ad0dd82d5289c2650ac943edaf82e50f8a2f9712363617fa2999bb47d1dc not found: ID does not exist" containerID="3c56ad0dd82d5289c2650ac943edaf82e50f8a2f9712363617fa2999bb47d1dc"
Dec 10 13:21:16 crc kubenswrapper[4921]: I1210 13:21:16.036641 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3c56ad0dd82d5289c2650ac943edaf82e50f8a2f9712363617fa2999bb47d1dc"} err="failed to get container status \"3c56ad0dd82d5289c2650ac943edaf82e50f8a2f9712363617fa2999bb47d1dc\": rpc error: code = NotFound desc = could not find container \"3c56ad0dd82d5289c2650ac943edaf82e50f8a2f9712363617fa2999bb47d1dc\": container with ID starting with 3c56ad0dd82d5289c2650ac943edaf82e50f8a2f9712363617fa2999bb47d1dc not found: ID does not exist"
Dec 10 13:21:16 crc kubenswrapper[4921]: I1210 13:21:16.036668 4921 scope.go:117] "RemoveContainer" containerID="d0614721af3c57efb9c19900b026764315aea7c4ef7aa4558e25406a29ac71b4"
Dec 10 13:21:16 crc kubenswrapper[4921]: E1210 13:21:16.038197 4921 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d0614721af3c57efb9c19900b026764315aea7c4ef7aa4558e25406a29ac71b4\": container with ID starting with d0614721af3c57efb9c19900b026764315aea7c4ef7aa4558e25406a29ac71b4 not found: ID does not exist" containerID="d0614721af3c57efb9c19900b026764315aea7c4ef7aa4558e25406a29ac71b4"
Dec 10 13:21:16 crc kubenswrapper[4921]: I1210 13:21:16.038219 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d0614721af3c57efb9c19900b026764315aea7c4ef7aa4558e25406a29ac71b4"} err="failed to get container status \"d0614721af3c57efb9c19900b026764315aea7c4ef7aa4558e25406a29ac71b4\": rpc error: code = NotFound desc = could not find container \"d0614721af3c57efb9c19900b026764315aea7c4ef7aa4558e25406a29ac71b4\": container with ID starting with d0614721af3c57efb9c19900b026764315aea7c4ef7aa4558e25406a29ac71b4 not found: ID does not exist"
Dec 10 13:21:16 crc kubenswrapper[4921]: I1210 13:21:16.043460 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-t96cb"]
Dec 10 13:21:16 crc kubenswrapper[4921]: I1210 13:21:16.053991 4921 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-t96cb"]
Dec 10 13:21:17 crc kubenswrapper[4921]: I1210 13:21:17.219897 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1242d417-c080-41c7-a03b-cf7ac417f342" path="/var/lib/kubelet/pods/1242d417-c080-41c7-a03b-cf7ac417f342/volumes"
Dec 10 13:21:37 crc kubenswrapper[4921]: I1210 13:21:37.693836 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-jjmbx"]
Dec 10 13:21:37 crc kubenswrapper[4921]: E1210 13:21:37.694720 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1242d417-c080-41c7-a03b-cf7ac417f342" containerName="extract-utilities"
Dec 10 13:21:37 crc kubenswrapper[4921]: I1210 13:21:37.694732 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="1242d417-c080-41c7-a03b-cf7ac417f342" containerName="extract-utilities"
Dec 10 13:21:37 crc kubenswrapper[4921]: E1210 13:21:37.694750 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1242d417-c080-41c7-a03b-cf7ac417f342" containerName="registry-server"
Dec 10 13:21:37 crc kubenswrapper[4921]: I1210 13:21:37.694756 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="1242d417-c080-41c7-a03b-cf7ac417f342" containerName="registry-server"
Dec 10 13:21:37 crc kubenswrapper[4921]: E1210 13:21:37.694772 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1242d417-c080-41c7-a03b-cf7ac417f342" containerName="extract-content"
Dec 10 13:21:37 crc kubenswrapper[4921]: I1210 13:21:37.694779 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="1242d417-c080-41c7-a03b-cf7ac417f342" containerName="extract-content"
Dec 10 13:21:37 crc kubenswrapper[4921]: I1210 13:21:37.694953 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="1242d417-c080-41c7-a03b-cf7ac417f342" containerName="registry-server"
Dec 10 13:21:37 crc kubenswrapper[4921]: I1210 13:21:37.696212 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-jjmbx"
Dec 10 13:21:37 crc kubenswrapper[4921]: I1210 13:21:37.709780 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-jjmbx"]
Dec 10 13:21:37 crc kubenswrapper[4921]: I1210 13:21:37.857623 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cf0d3848-39ec-4057-9bc1-64a2b48bd0dc-utilities\") pod \"community-operators-jjmbx\" (UID: \"cf0d3848-39ec-4057-9bc1-64a2b48bd0dc\") " pod="openshift-marketplace/community-operators-jjmbx"
Dec 10 13:21:37 crc kubenswrapper[4921]: I1210 13:21:37.857949 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mdt6d\" (UniqueName: \"kubernetes.io/projected/cf0d3848-39ec-4057-9bc1-64a2b48bd0dc-kube-api-access-mdt6d\") pod \"community-operators-jjmbx\" (UID: \"cf0d3848-39ec-4057-9bc1-64a2b48bd0dc\") " pod="openshift-marketplace/community-operators-jjmbx"
Dec 10 13:21:37 crc kubenswrapper[4921]: I1210 13:21:37.857980 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cf0d3848-39ec-4057-9bc1-64a2b48bd0dc-catalog-content\") pod \"community-operators-jjmbx\" (UID: \"cf0d3848-39ec-4057-9bc1-64a2b48bd0dc\") " pod="openshift-marketplace/community-operators-jjmbx"
Dec 10 13:21:37 crc kubenswrapper[4921]: I1210 13:21:37.959815 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mdt6d\" (UniqueName: \"kubernetes.io/projected/cf0d3848-39ec-4057-9bc1-64a2b48bd0dc-kube-api-access-mdt6d\") pod \"community-operators-jjmbx\" (UID: \"cf0d3848-39ec-4057-9bc1-64a2b48bd0dc\") " pod="openshift-marketplace/community-operators-jjmbx"
Dec 10 13:21:37 crc kubenswrapper[4921]: I1210 13:21:37.959891 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cf0d3848-39ec-4057-9bc1-64a2b48bd0dc-catalog-content\") pod \"community-operators-jjmbx\" (UID: \"cf0d3848-39ec-4057-9bc1-64a2b48bd0dc\") " pod="openshift-marketplace/community-operators-jjmbx"
Dec 10 13:21:37 crc kubenswrapper[4921]: I1210 13:21:37.960030 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cf0d3848-39ec-4057-9bc1-64a2b48bd0dc-utilities\") pod \"community-operators-jjmbx\" (UID: \"cf0d3848-39ec-4057-9bc1-64a2b48bd0dc\") " pod="openshift-marketplace/community-operators-jjmbx"
Dec 10 13:21:37 crc kubenswrapper[4921]: I1210 13:21:37.960586 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cf0d3848-39ec-4057-9bc1-64a2b48bd0dc-utilities\") pod \"community-operators-jjmbx\" (UID: \"cf0d3848-39ec-4057-9bc1-64a2b48bd0dc\") " pod="openshift-marketplace/community-operators-jjmbx"
Dec 10 13:21:37 crc kubenswrapper[4921]: I1210 13:21:37.960756 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cf0d3848-39ec-4057-9bc1-64a2b48bd0dc-catalog-content\") pod \"community-operators-jjmbx\" (UID: \"cf0d3848-39ec-4057-9bc1-64a2b48bd0dc\") " pod="openshift-marketplace/community-operators-jjmbx"
Dec 10 13:21:37 crc kubenswrapper[4921]: I1210 13:21:37.988554 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mdt6d\" (UniqueName: \"kubernetes.io/projected/cf0d3848-39ec-4057-9bc1-64a2b48bd0dc-kube-api-access-mdt6d\") pod \"community-operators-jjmbx\" (UID: \"cf0d3848-39ec-4057-9bc1-64a2b48bd0dc\") " pod="openshift-marketplace/community-operators-jjmbx"
Dec 10 13:21:38 crc kubenswrapper[4921]: I1210 13:21:38.027226 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-jjmbx"
Dec 10 13:21:38 crc kubenswrapper[4921]: I1210 13:21:38.524251 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-jjmbx"]
Dec 10 13:21:39 crc kubenswrapper[4921]: I1210 13:21:39.223924 4921 generic.go:334] "Generic (PLEG): container finished" podID="cf0d3848-39ec-4057-9bc1-64a2b48bd0dc" containerID="6fa93abf5bfcd788c9323ba44f861161e39f51897f64f406d255166ab8c4ff65" exitCode=0
Dec 10 13:21:39 crc kubenswrapper[4921]: I1210 13:21:39.223965 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jjmbx" event={"ID":"cf0d3848-39ec-4057-9bc1-64a2b48bd0dc","Type":"ContainerDied","Data":"6fa93abf5bfcd788c9323ba44f861161e39f51897f64f406d255166ab8c4ff65"}
Dec 10 13:21:39 crc kubenswrapper[4921]: I1210 13:21:39.224311 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jjmbx" event={"ID":"cf0d3848-39ec-4057-9bc1-64a2b48bd0dc","Type":"ContainerStarted","Data":"bc7d48371fedc5f648fc2a1db92c9f74e3cd1cf4c421679f0d78cbeac593a029"}
Dec 10 13:21:39 crc kubenswrapper[4921]: I1210 13:21:39.228125 4921 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Dec 10 13:21:41 crc kubenswrapper[4921]: I1210 13:21:41.489285 4921 generic.go:334] "Generic (PLEG): container finished" podID="cf0d3848-39ec-4057-9bc1-64a2b48bd0dc" containerID="015d144c4f6266d982f98a0e310283770d615018525d70062eacefb066a26313" exitCode=0
Dec 10 13:21:41 crc kubenswrapper[4921]: I1210 13:21:41.489341 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jjmbx" event={"ID":"cf0d3848-39ec-4057-9bc1-64a2b48bd0dc","Type":"ContainerDied","Data":"015d144c4f6266d982f98a0e310283770d615018525d70062eacefb066a26313"}
Dec 10 13:21:42 crc kubenswrapper[4921]: I1210 13:21:42.499355 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jjmbx" event={"ID":"cf0d3848-39ec-4057-9bc1-64a2b48bd0dc","Type":"ContainerStarted","Data":"4a1017eaa68d31be584c2f0a037351685be361d867ff6219b3673ade31b9041d"}
Dec 10 13:21:42 crc kubenswrapper[4921]: I1210 13:21:42.521928 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-jjmbx" podStartSLOduration=2.79666476 podStartE2EDuration="5.521911628s" podCreationTimestamp="2025-12-10 13:21:37 +0000 UTC" firstStartedPulling="2025-12-10 13:21:39.227877915 +0000 UTC m=+1496.444099839" lastFinishedPulling="2025-12-10 13:21:41.953124783 +0000 UTC m=+1499.169346707" observedRunningTime="2025-12-10 13:21:42.51454109 +0000 UTC m=+1499.730763044" watchObservedRunningTime="2025-12-10 13:21:42.521911628 +0000 UTC m=+1499.738133552"
Dec 10 13:21:46 crc kubenswrapper[4921]: I1210 13:21:46.710771 4921 patch_prober.go:28] interesting pod/machine-config-daemon-vn2n6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 10 13:21:46 crc kubenswrapper[4921]: I1210 13:21:46.711216 4921 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" podUID="354355f7-6630-49a8-bdc5-5e875feecb7f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 10 13:21:48 crc kubenswrapper[4921]: I1210 13:21:48.027545 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-jjmbx"
Dec 10 13:21:48 crc kubenswrapper[4921]: I1210 13:21:48.027964 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-jjmbx"
Dec 10 13:21:48 crc kubenswrapper[4921]: I1210 13:21:48.090044 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-jjmbx"
Dec 10 13:21:48 crc kubenswrapper[4921]: I1210 13:21:48.585553 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-jjmbx"
Dec 10 13:21:48 crc kubenswrapper[4921]: I1210 13:21:48.636838 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-jjmbx"]
Dec 10 13:21:50 crc kubenswrapper[4921]: I1210 13:21:50.566658 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-jjmbx" podUID="cf0d3848-39ec-4057-9bc1-64a2b48bd0dc" containerName="registry-server" containerID="cri-o://4a1017eaa68d31be584c2f0a037351685be361d867ff6219b3673ade31b9041d" gracePeriod=2
Dec 10 13:21:51 crc kubenswrapper[4921]: I1210 13:21:51.054465 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-jjmbx"
Dec 10 13:21:51 crc kubenswrapper[4921]: I1210 13:21:51.156359 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mdt6d\" (UniqueName: \"kubernetes.io/projected/cf0d3848-39ec-4057-9bc1-64a2b48bd0dc-kube-api-access-mdt6d\") pod \"cf0d3848-39ec-4057-9bc1-64a2b48bd0dc\" (UID: \"cf0d3848-39ec-4057-9bc1-64a2b48bd0dc\") "
Dec 10 13:21:51 crc kubenswrapper[4921]: I1210 13:21:51.156488 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cf0d3848-39ec-4057-9bc1-64a2b48bd0dc-utilities\") pod \"cf0d3848-39ec-4057-9bc1-64a2b48bd0dc\" (UID: \"cf0d3848-39ec-4057-9bc1-64a2b48bd0dc\") "
Dec 10 13:21:51 crc kubenswrapper[4921]: I1210 13:21:51.156661 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cf0d3848-39ec-4057-9bc1-64a2b48bd0dc-catalog-content\") pod \"cf0d3848-39ec-4057-9bc1-64a2b48bd0dc\" (UID: \"cf0d3848-39ec-4057-9bc1-64a2b48bd0dc\") "
Dec 10 13:21:51 crc kubenswrapper[4921]: I1210 13:21:51.157441 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cf0d3848-39ec-4057-9bc1-64a2b48bd0dc-utilities" (OuterVolumeSpecName: "utilities") pod "cf0d3848-39ec-4057-9bc1-64a2b48bd0dc" (UID: "cf0d3848-39ec-4057-9bc1-64a2b48bd0dc"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 10 13:21:51 crc kubenswrapper[4921]: I1210 13:21:51.163969 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cf0d3848-39ec-4057-9bc1-64a2b48bd0dc-kube-api-access-mdt6d" (OuterVolumeSpecName: "kube-api-access-mdt6d") pod "cf0d3848-39ec-4057-9bc1-64a2b48bd0dc" (UID: "cf0d3848-39ec-4057-9bc1-64a2b48bd0dc"). InnerVolumeSpecName "kube-api-access-mdt6d". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 10 13:21:51 crc kubenswrapper[4921]: I1210 13:21:51.204863 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cf0d3848-39ec-4057-9bc1-64a2b48bd0dc-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "cf0d3848-39ec-4057-9bc1-64a2b48bd0dc" (UID: "cf0d3848-39ec-4057-9bc1-64a2b48bd0dc"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 10 13:21:51 crc kubenswrapper[4921]: I1210 13:21:51.258492 4921 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cf0d3848-39ec-4057-9bc1-64a2b48bd0dc-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 10 13:21:51 crc kubenswrapper[4921]: I1210 13:21:51.258532 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mdt6d\" (UniqueName: \"kubernetes.io/projected/cf0d3848-39ec-4057-9bc1-64a2b48bd0dc-kube-api-access-mdt6d\") on node \"crc\" DevicePath \"\""
Dec 10 13:21:51 crc kubenswrapper[4921]: I1210 13:21:51.258542 4921 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cf0d3848-39ec-4057-9bc1-64a2b48bd0dc-utilities\") on node \"crc\" DevicePath \"\""
Dec 10 13:21:51 crc kubenswrapper[4921]: I1210 13:21:51.576685 4921 generic.go:334] "Generic (PLEG): container finished" podID="cf0d3848-39ec-4057-9bc1-64a2b48bd0dc" containerID="4a1017eaa68d31be584c2f0a037351685be361d867ff6219b3673ade31b9041d" exitCode=0
Dec 10 13:21:51 crc kubenswrapper[4921]: I1210 13:21:51.576734 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jjmbx" event={"ID":"cf0d3848-39ec-4057-9bc1-64a2b48bd0dc","Type":"ContainerDied","Data":"4a1017eaa68d31be584c2f0a037351685be361d867ff6219b3673ade31b9041d"}
Dec 10 13:21:51 crc kubenswrapper[4921]: I1210 13:21:51.576806 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jjmbx" event={"ID":"cf0d3848-39ec-4057-9bc1-64a2b48bd0dc","Type":"ContainerDied","Data":"bc7d48371fedc5f648fc2a1db92c9f74e3cd1cf4c421679f0d78cbeac593a029"}
Dec 10 13:21:51 crc kubenswrapper[4921]: I1210 13:21:51.576842 4921 scope.go:117] "RemoveContainer" containerID="4a1017eaa68d31be584c2f0a037351685be361d867ff6219b3673ade31b9041d"
Dec 10 13:21:51 crc kubenswrapper[4921]: I1210 13:21:51.577760 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-jjmbx"
Dec 10 13:21:51 crc kubenswrapper[4921]: I1210 13:21:51.606097 4921 scope.go:117] "RemoveContainer" containerID="015d144c4f6266d982f98a0e310283770d615018525d70062eacefb066a26313"
Dec 10 13:21:51 crc kubenswrapper[4921]: I1210 13:21:51.638451 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-jjmbx"]
Dec 10 13:21:51 crc kubenswrapper[4921]: I1210 13:21:51.660312 4921 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-jjmbx"]
Dec 10 13:21:51 crc kubenswrapper[4921]: I1210 13:21:51.671555 4921 scope.go:117] "RemoveContainer" containerID="6fa93abf5bfcd788c9323ba44f861161e39f51897f64f406d255166ab8c4ff65"
Dec 10 13:21:51 crc kubenswrapper[4921]: I1210 13:21:51.712702 4921 scope.go:117] "RemoveContainer" containerID="4a1017eaa68d31be584c2f0a037351685be361d867ff6219b3673ade31b9041d"
Dec 10 13:21:51 crc kubenswrapper[4921]: E1210 13:21:51.713023 4921 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4a1017eaa68d31be584c2f0a037351685be361d867ff6219b3673ade31b9041d\": container with ID starting with 4a1017eaa68d31be584c2f0a037351685be361d867ff6219b3673ade31b9041d not found: ID does not exist" containerID="4a1017eaa68d31be584c2f0a037351685be361d867ff6219b3673ade31b9041d"
Dec 10 13:21:51 crc kubenswrapper[4921]: I1210 13:21:51.713052 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4a1017eaa68d31be584c2f0a037351685be361d867ff6219b3673ade31b9041d"} err="failed to get container status \"4a1017eaa68d31be584c2f0a037351685be361d867ff6219b3673ade31b9041d\": rpc error: code = NotFound desc = could not find container \"4a1017eaa68d31be584c2f0a037351685be361d867ff6219b3673ade31b9041d\": container with ID starting with 4a1017eaa68d31be584c2f0a037351685be361d867ff6219b3673ade31b9041d not found: ID does not exist"
Dec 10 13:21:51 crc kubenswrapper[4921]: I1210 13:21:51.713073 4921 scope.go:117] "RemoveContainer" containerID="015d144c4f6266d982f98a0e310283770d615018525d70062eacefb066a26313"
Dec 10 13:21:51 crc kubenswrapper[4921]: E1210 13:21:51.713279 4921 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"015d144c4f6266d982f98a0e310283770d615018525d70062eacefb066a26313\": container with ID starting with 015d144c4f6266d982f98a0e310283770d615018525d70062eacefb066a26313 not found: ID does not exist" containerID="015d144c4f6266d982f98a0e310283770d615018525d70062eacefb066a26313"
Dec 10 13:21:51 crc kubenswrapper[4921]: I1210 13:21:51.713301 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"015d144c4f6266d982f98a0e310283770d615018525d70062eacefb066a26313"} err="failed to get container status \"015d144c4f6266d982f98a0e310283770d615018525d70062eacefb066a26313\": rpc error: code = NotFound desc = could not find container \"015d144c4f6266d982f98a0e310283770d615018525d70062eacefb066a26313\": container with ID starting with 015d144c4f6266d982f98a0e310283770d615018525d70062eacefb066a26313 not found: ID does not exist"
Dec 10 13:21:51 crc kubenswrapper[4921]: I1210 13:21:51.713314 4921 scope.go:117] "RemoveContainer" containerID="6fa93abf5bfcd788c9323ba44f861161e39f51897f64f406d255166ab8c4ff65"
Dec 10 13:21:51 crc kubenswrapper[4921]: E1210 13:21:51.714468 4921 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6fa93abf5bfcd788c9323ba44f861161e39f51897f64f406d255166ab8c4ff65\": container with ID starting with 6fa93abf5bfcd788c9323ba44f861161e39f51897f64f406d255166ab8c4ff65 not found: ID does not exist" containerID="6fa93abf5bfcd788c9323ba44f861161e39f51897f64f406d255166ab8c4ff65"
Dec 10 13:21:51 crc kubenswrapper[4921]: I1210 13:21:51.714522 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6fa93abf5bfcd788c9323ba44f861161e39f51897f64f406d255166ab8c4ff65"} err="failed to get container status \"6fa93abf5bfcd788c9323ba44f861161e39f51897f64f406d255166ab8c4ff65\": rpc error: code = NotFound desc = could not find container \"6fa93abf5bfcd788c9323ba44f861161e39f51897f64f406d255166ab8c4ff65\": container with ID starting with 6fa93abf5bfcd788c9323ba44f861161e39f51897f64f406d255166ab8c4ff65 not found: ID does not exist"
Dec 10 13:21:53 crc kubenswrapper[4921]: I1210 13:21:53.203529 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cf0d3848-39ec-4057-9bc1-64a2b48bd0dc" path="/var/lib/kubelet/pods/cf0d3848-39ec-4057-9bc1-64a2b48bd0dc/volumes"
Dec 10 13:22:16 crc kubenswrapper[4921]: I1210 13:22:16.710440 4921 patch_prober.go:28] interesting pod/machine-config-daemon-vn2n6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 10 13:22:16 crc kubenswrapper[4921]: I1210 13:22:16.710939 4921 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" podUID="354355f7-6630-49a8-bdc5-5e875feecb7f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 10 13:22:23 crc kubenswrapper[4921]: I1210 13:22:23.588001 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-stzwv"]
Dec 10 13:22:23 crc kubenswrapper[4921]: E1210 13:22:23.589119 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf0d3848-39ec-4057-9bc1-64a2b48bd0dc" containerName="extract-utilities"
Dec 10 13:22:23 crc kubenswrapper[4921]: I1210 13:22:23.589134 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf0d3848-39ec-4057-9bc1-64a2b48bd0dc" containerName="extract-utilities"
Dec 10 13:22:23 crc kubenswrapper[4921]: E1210 13:22:23.589151 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf0d3848-39ec-4057-9bc1-64a2b48bd0dc" containerName="extract-content"
Dec 10 13:22:23 crc kubenswrapper[4921]: I1210 13:22:23.589159 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf0d3848-39ec-4057-9bc1-64a2b48bd0dc" containerName="extract-content"
Dec 10 13:22:23 crc kubenswrapper[4921]: E1210 13:22:23.589180 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf0d3848-39ec-4057-9bc1-64a2b48bd0dc" containerName="registry-server"
Dec 10 13:22:23 crc kubenswrapper[4921]: I1210 13:22:23.589189 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf0d3848-39ec-4057-9bc1-64a2b48bd0dc" containerName="registry-server"
Dec 10 13:22:23 crc kubenswrapper[4921]: I1210 13:22:23.589500 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="cf0d3848-39ec-4057-9bc1-64a2b48bd0dc" containerName="registry-server"
Dec 10 13:22:23 crc kubenswrapper[4921]: I1210 13:22:23.591100 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-stzwv"
Dec 10 13:22:23 crc kubenswrapper[4921]: I1210 13:22:23.610427 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-stzwv"]
Dec 10 13:22:23 crc kubenswrapper[4921]: I1210 13:22:23.650617 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/80b6c46b-c866-4183-8e59-6f802fdaaa75-utilities\") pod \"certified-operators-stzwv\" (UID: \"80b6c46b-c866-4183-8e59-6f802fdaaa75\") " pod="openshift-marketplace/certified-operators-stzwv"
Dec 10 13:22:23 crc kubenswrapper[4921]: I1210 13:22:23.650667 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9rcb5\" (UniqueName: \"kubernetes.io/projected/80b6c46b-c866-4183-8e59-6f802fdaaa75-kube-api-access-9rcb5\") pod \"certified-operators-stzwv\" (UID: \"80b6c46b-c866-4183-8e59-6f802fdaaa75\") " pod="openshift-marketplace/certified-operators-stzwv"
Dec 10 13:22:23 crc kubenswrapper[4921]: I1210 13:22:23.650755 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/80b6c46b-c866-4183-8e59-6f802fdaaa75-catalog-content\") pod \"certified-operators-stzwv\" (UID: \"80b6c46b-c866-4183-8e59-6f802fdaaa75\") " pod="openshift-marketplace/certified-operators-stzwv"
Dec 10 13:22:23 crc kubenswrapper[4921]: I1210 13:22:23.752936 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9rcb5\" (UniqueName: \"kubernetes.io/projected/80b6c46b-c866-4183-8e59-6f802fdaaa75-kube-api-access-9rcb5\") pod \"certified-operators-stzwv\" (UID: \"80b6c46b-c866-4183-8e59-6f802fdaaa75\") " pod="openshift-marketplace/certified-operators-stzwv"
Dec 10 13:22:23 crc kubenswrapper[4921]: I1210 13:22:23.753355 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/80b6c46b-c866-4183-8e59-6f802fdaaa75-catalog-content\") pod \"certified-operators-stzwv\" (UID: \"80b6c46b-c866-4183-8e59-6f802fdaaa75\") " pod="openshift-marketplace/certified-operators-stzwv"
Dec 10 13:22:23 crc kubenswrapper[4921]: I1210 13:22:23.753646 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/80b6c46b-c866-4183-8e59-6f802fdaaa75-utilities\") pod \"certified-operators-stzwv\" (UID: \"80b6c46b-c866-4183-8e59-6f802fdaaa75\") " pod="openshift-marketplace/certified-operators-stzwv"
Dec 10 13:22:23 crc kubenswrapper[4921]: I1210 13:22:23.753808 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/80b6c46b-c866-4183-8e59-6f802fdaaa75-catalog-content\") pod \"certified-operators-stzwv\" (UID: \"80b6c46b-c866-4183-8e59-6f802fdaaa75\") " pod="openshift-marketplace/certified-operators-stzwv"
Dec 10 13:22:23 crc kubenswrapper[4921]: I1210 13:22:23.754175 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/80b6c46b-c866-4183-8e59-6f802fdaaa75-utilities\") pod \"certified-operators-stzwv\" (UID: \"80b6c46b-c866-4183-8e59-6f802fdaaa75\") " pod="openshift-marketplace/certified-operators-stzwv"
Dec 10 13:22:23 crc kubenswrapper[4921]: I1210 13:22:23.776262 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9rcb5\" (UniqueName: \"kubernetes.io/projected/80b6c46b-c866-4183-8e59-6f802fdaaa75-kube-api-access-9rcb5\") pod \"certified-operators-stzwv\" (UID: \"80b6c46b-c866-4183-8e59-6f802fdaaa75\") " pod="openshift-marketplace/certified-operators-stzwv"
Dec 10 13:22:23 crc kubenswrapper[4921]: I1210 13:22:23.921732 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-stzwv"
Dec 10 13:22:24 crc kubenswrapper[4921]: I1210 13:22:24.388333 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-stzwv"]
Dec 10 13:22:24 crc kubenswrapper[4921]: W1210 13:22:24.397271 4921 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod80b6c46b_c866_4183_8e59_6f802fdaaa75.slice/crio-88c9444d0318acaa787e0ad0d7efdaed2a8a77ea8014c9ec34445cc908bb6165 WatchSource:0}: Error finding container 88c9444d0318acaa787e0ad0d7efdaed2a8a77ea8014c9ec34445cc908bb6165: Status 404 returned error can't find the container with id 88c9444d0318acaa787e0ad0d7efdaed2a8a77ea8014c9ec34445cc908bb6165
Dec 10 13:22:24 crc kubenswrapper[4921]: I1210 13:22:24.862071 4921 generic.go:334] "Generic (PLEG): container finished" podID="80b6c46b-c866-4183-8e59-6f802fdaaa75" containerID="7e6df91d56a07ee152b89670a221b8ae6f4a5510e92812541d6383d37fab30eb" exitCode=0
Dec 10 13:22:24 crc kubenswrapper[4921]: I1210 13:22:24.862146 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-stzwv" event={"ID":"80b6c46b-c866-4183-8e59-6f802fdaaa75","Type":"ContainerDied","Data":"7e6df91d56a07ee152b89670a221b8ae6f4a5510e92812541d6383d37fab30eb"}
Dec 10 13:22:24 crc kubenswrapper[4921]: I1210 13:22:24.862547 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-stzwv" event={"ID":"80b6c46b-c866-4183-8e59-6f802fdaaa75","Type":"ContainerStarted","Data":"88c9444d0318acaa787e0ad0d7efdaed2a8a77ea8014c9ec34445cc908bb6165"}
Dec 10 13:22:30 crc kubenswrapper[4921]: I1210 13:22:30.919456 4921 generic.go:334] "Generic (PLEG): container finished" podID="80b6c46b-c866-4183-8e59-6f802fdaaa75" containerID="ff04f8214847665c5fc4e9a058dae1170dab08a4433ed060b48724c2c8d19f7d" exitCode=0
Dec 10 13:22:30 crc kubenswrapper[4921]: I1210 13:22:30.919517 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-stzwv" event={"ID":"80b6c46b-c866-4183-8e59-6f802fdaaa75","Type":"ContainerDied","Data":"ff04f8214847665c5fc4e9a058dae1170dab08a4433ed060b48724c2c8d19f7d"}
Dec 10 13:22:32 crc kubenswrapper[4921]: I1210 13:22:32.939311 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-stzwv" event={"ID":"80b6c46b-c866-4183-8e59-6f802fdaaa75","Type":"ContainerStarted","Data":"7cdbe8d92578a072bd55018d52e6a08cf7a5ca32956245f4d56b344e8941253a"}
Dec 10 13:22:32 crc kubenswrapper[4921]: I1210 13:22:32.965940 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-stzwv" podStartSLOduration=2.6761979179999997 podStartE2EDuration="9.965921174s" podCreationTimestamp="2025-12-10 13:22:23 +0000 UTC" firstStartedPulling="2025-12-10 13:22:24.86347699 +0000 UTC m=+1542.079698914" lastFinishedPulling="2025-12-10 13:22:32.153200236 +0000 UTC m=+1549.369422170" observedRunningTime="2025-12-10 13:22:32.95758346 +0000 UTC m=+1550.173805384" watchObservedRunningTime="2025-12-10 13:22:32.965921174 +0000 UTC m=+1550.182143108"
Dec 10 13:22:33 crc kubenswrapper[4921]: I1210 13:22:33.922817 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-stzwv"
Dec 10 13:22:33 crc kubenswrapper[4921]: I1210 13:22:33.923136 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-stzwv"
Dec 10 13:22:34 crc kubenswrapper[4921]: I1210 13:22:34.985833 4921 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-stzwv" podUID="80b6c46b-c866-4183-8e59-6f802fdaaa75" containerName="registry-server" probeResult="failure" output=<
Dec 10 13:22:34 crc kubenswrapper[4921]: timeout: failed to connect service ":50051" within 1s
Dec 10 13:22:34 crc kubenswrapper[4921]: >
Dec 10 13:22:43 crc kubenswrapper[4921]: I1210 13:22:43.979715 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-stzwv"
Dec 10 13:22:44 crc kubenswrapper[4921]: I1210 13:22:44.056231 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-stzwv"
Dec 10 13:22:44 crc kubenswrapper[4921]: I1210 13:22:44.165977 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-stzwv"]
Dec 10 13:22:44 crc kubenswrapper[4921]: I1210 13:22:44.223308 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-s2x6r"]
Dec 10 13:22:44 crc kubenswrapper[4921]: I1210 13:22:44.223634 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-s2x6r" podUID="02c8bc85-0cb6-49fb-8c84-625acc7de573" containerName="registry-server" containerID="cri-o://b33a9fba470582a105bc126dd70e60ea266c8e5b68211e43933cba4ede59cb22" gracePeriod=2
Dec 10 13:22:44 crc kubenswrapper[4921]: I1210 13:22:44.716875 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-s2x6r"
Dec 10 13:22:44 crc kubenswrapper[4921]: I1210 13:22:44.866375 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/02c8bc85-0cb6-49fb-8c84-625acc7de573-utilities\") pod \"02c8bc85-0cb6-49fb-8c84-625acc7de573\" (UID: \"02c8bc85-0cb6-49fb-8c84-625acc7de573\") "
Dec 10 13:22:44 crc kubenswrapper[4921]: I1210 13:22:44.866657 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/02c8bc85-0cb6-49fb-8c84-625acc7de573-catalog-content\") pod \"02c8bc85-0cb6-49fb-8c84-625acc7de573\" (UID: \"02c8bc85-0cb6-49fb-8c84-625acc7de573\") "
Dec 10 13:22:44 crc kubenswrapper[4921]: I1210 13:22:44.866730 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5j7l2\" (UniqueName: \"kubernetes.io/projected/02c8bc85-0cb6-49fb-8c84-625acc7de573-kube-api-access-5j7l2\") pod \"02c8bc85-0cb6-49fb-8c84-625acc7de573\" (UID: \"02c8bc85-0cb6-49fb-8c84-625acc7de573\") "
Dec 10 13:22:44 crc kubenswrapper[4921]: I1210 13:22:44.866837 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/02c8bc85-0cb6-49fb-8c84-625acc7de573-utilities" (OuterVolumeSpecName: "utilities") pod "02c8bc85-0cb6-49fb-8c84-625acc7de573" (UID: "02c8bc85-0cb6-49fb-8c84-625acc7de573"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 10 13:22:44 crc kubenswrapper[4921]: I1210 13:22:44.867810 4921 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/02c8bc85-0cb6-49fb-8c84-625acc7de573-utilities\") on node \"crc\" DevicePath \"\""
Dec 10 13:22:44 crc kubenswrapper[4921]: I1210 13:22:44.878718 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/02c8bc85-0cb6-49fb-8c84-625acc7de573-kube-api-access-5j7l2" (OuterVolumeSpecName: "kube-api-access-5j7l2") pod "02c8bc85-0cb6-49fb-8c84-625acc7de573" (UID: "02c8bc85-0cb6-49fb-8c84-625acc7de573"). InnerVolumeSpecName "kube-api-access-5j7l2". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 10 13:22:44 crc kubenswrapper[4921]: I1210 13:22:44.912952 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/02c8bc85-0cb6-49fb-8c84-625acc7de573-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "02c8bc85-0cb6-49fb-8c84-625acc7de573" (UID: "02c8bc85-0cb6-49fb-8c84-625acc7de573"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 10 13:22:44 crc kubenswrapper[4921]: I1210 13:22:44.969669 4921 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/02c8bc85-0cb6-49fb-8c84-625acc7de573-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 10 13:22:44 crc kubenswrapper[4921]: I1210 13:22:44.969708 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5j7l2\" (UniqueName: \"kubernetes.io/projected/02c8bc85-0cb6-49fb-8c84-625acc7de573-kube-api-access-5j7l2\") on node \"crc\" DevicePath \"\""
Dec 10 13:22:45 crc kubenswrapper[4921]: I1210 13:22:45.332659 4921 generic.go:334] "Generic (PLEG): container finished" podID="02c8bc85-0cb6-49fb-8c84-625acc7de573" containerID="b33a9fba470582a105bc126dd70e60ea266c8e5b68211e43933cba4ede59cb22" exitCode=0
Dec 10 13:22:45 crc kubenswrapper[4921]: I1210 13:22:45.332832 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s2x6r" event={"ID":"02c8bc85-0cb6-49fb-8c84-625acc7de573","Type":"ContainerDied","Data":"b33a9fba470582a105bc126dd70e60ea266c8e5b68211e43933cba4ede59cb22"}
Dec 10 13:22:45 crc kubenswrapper[4921]: I1210 13:22:45.332911 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-s2x6r"
Dec 10 13:22:45 crc kubenswrapper[4921]: I1210 13:22:45.334675 4921 scope.go:117] "RemoveContainer" containerID="b33a9fba470582a105bc126dd70e60ea266c8e5b68211e43933cba4ede59cb22"
Dec 10 13:22:45 crc kubenswrapper[4921]: I1210 13:22:45.334638 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s2x6r" event={"ID":"02c8bc85-0cb6-49fb-8c84-625acc7de573","Type":"ContainerDied","Data":"5049499e516474bcdfa75a35561e175178f98ce528f1ea3dad8f031998df3aa3"}
Dec 10 13:22:45 crc kubenswrapper[4921]: I1210 13:22:45.359427 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-s2x6r"]
Dec 10 13:22:45 crc kubenswrapper[4921]: I1210 13:22:45.363657 4921 scope.go:117] "RemoveContainer" containerID="22f16ccc2658e514872fc78d57e530212c960da5579342cc8fe6b368eb36ddcb"
Dec 10 13:22:45 crc kubenswrapper[4921]: I1210 13:22:45.370898 4921 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-s2x6r"]
Dec 10 13:22:45 crc kubenswrapper[4921]: I1210 13:22:45.423416 4921 scope.go:117] "RemoveContainer" containerID="98a315dce7a124e95c4d1c598c4d27e0e6965394a3ddd659b8dd4457b439dd79"
Dec 10 13:22:45 crc kubenswrapper[4921]: I1210 13:22:45.457255 4921 scope.go:117] "RemoveContainer" containerID="b33a9fba470582a105bc126dd70e60ea266c8e5b68211e43933cba4ede59cb22"
Dec 10 13:22:45 crc kubenswrapper[4921]: E1210 13:22:45.457874 4921 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b33a9fba470582a105bc126dd70e60ea266c8e5b68211e43933cba4ede59cb22\": container with ID starting with b33a9fba470582a105bc126dd70e60ea266c8e5b68211e43933cba4ede59cb22 not found: ID does not exist" containerID="b33a9fba470582a105bc126dd70e60ea266c8e5b68211e43933cba4ede59cb22"
Dec 10 13:22:45 crc kubenswrapper[4921]: I1210 13:22:45.457904 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b33a9fba470582a105bc126dd70e60ea266c8e5b68211e43933cba4ede59cb22"} err="failed to get container status \"b33a9fba470582a105bc126dd70e60ea266c8e5b68211e43933cba4ede59cb22\": rpc error: code = NotFound desc = could not find container \"b33a9fba470582a105bc126dd70e60ea266c8e5b68211e43933cba4ede59cb22\": container with ID starting with b33a9fba470582a105bc126dd70e60ea266c8e5b68211e43933cba4ede59cb22 not found: ID does not exist"
Dec 10 13:22:45 crc kubenswrapper[4921]: I1210 13:22:45.457922 4921 scope.go:117] "RemoveContainer" containerID="22f16ccc2658e514872fc78d57e530212c960da5579342cc8fe6b368eb36ddcb"
Dec 10 13:22:45 crc kubenswrapper[4921]: E1210 13:22:45.458473 4921 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"22f16ccc2658e514872fc78d57e530212c960da5579342cc8fe6b368eb36ddcb\": container with ID starting with 22f16ccc2658e514872fc78d57e530212c960da5579342cc8fe6b368eb36ddcb not found: ID does not exist" containerID="22f16ccc2658e514872fc78d57e530212c960da5579342cc8fe6b368eb36ddcb"
Dec 10 13:22:45 crc kubenswrapper[4921]: I1210 13:22:45.458493 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"22f16ccc2658e514872fc78d57e530212c960da5579342cc8fe6b368eb36ddcb"} err="failed to get container status \"22f16ccc2658e514872fc78d57e530212c960da5579342cc8fe6b368eb36ddcb\": rpc error: code = NotFound desc = could not find container \"22f16ccc2658e514872fc78d57e530212c960da5579342cc8fe6b368eb36ddcb\": container with ID starting with 22f16ccc2658e514872fc78d57e530212c960da5579342cc8fe6b368eb36ddcb not found: ID does not exist"
Dec 10 13:22:45 crc kubenswrapper[4921]: I1210 13:22:45.458504 4921 scope.go:117] "RemoveContainer" containerID="98a315dce7a124e95c4d1c598c4d27e0e6965394a3ddd659b8dd4457b439dd79"
Dec 10 13:22:45 crc kubenswrapper[4921]: E1210 13:22:45.458870 4921 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"98a315dce7a124e95c4d1c598c4d27e0e6965394a3ddd659b8dd4457b439dd79\": container with ID starting with 98a315dce7a124e95c4d1c598c4d27e0e6965394a3ddd659b8dd4457b439dd79 not found: ID does not exist" containerID="98a315dce7a124e95c4d1c598c4d27e0e6965394a3ddd659b8dd4457b439dd79"
Dec 10 13:22:45 crc kubenswrapper[4921]: I1210 13:22:45.458889 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"98a315dce7a124e95c4d1c598c4d27e0e6965394a3ddd659b8dd4457b439dd79"} err="failed to get container status \"98a315dce7a124e95c4d1c598c4d27e0e6965394a3ddd659b8dd4457b439dd79\": rpc error: code = NotFound desc = could not find container \"98a315dce7a124e95c4d1c598c4d27e0e6965394a3ddd659b8dd4457b439dd79\": container with ID starting with 98a315dce7a124e95c4d1c598c4d27e0e6965394a3ddd659b8dd4457b439dd79 not found: ID does not exist"
Dec 10 13:22:46 crc kubenswrapper[4921]: I1210 13:22:46.711175 4921 patch_prober.go:28] interesting pod/machine-config-daemon-vn2n6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 10 13:22:46 crc kubenswrapper[4921]: I1210 13:22:46.711472 4921 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" podUID="354355f7-6630-49a8-bdc5-5e875feecb7f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 10 13:22:46 crc kubenswrapper[4921]: I1210 13:22:46.711510 4921 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6"
Dec 10 13:22:46 crc kubenswrapper[4921]: I1210 13:22:46.712007 4921 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"0c62b92cda59f27c8c407b11b3375247aad49e7f6c08bd5714c68b8dfabe123c"} pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 10 13:22:46 crc kubenswrapper[4921]: I1210 13:22:46.712048 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" podUID="354355f7-6630-49a8-bdc5-5e875feecb7f" containerName="machine-config-daemon" containerID="cri-o://0c62b92cda59f27c8c407b11b3375247aad49e7f6c08bd5714c68b8dfabe123c" gracePeriod=600
Dec 10 13:22:46 crc kubenswrapper[4921]: E1210 13:22:46.856213 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-vn2n6_openshift-machine-config-operator(354355f7-6630-49a8-bdc5-5e875feecb7f)\"" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" podUID="354355f7-6630-49a8-bdc5-5e875feecb7f"
Dec 10 13:22:47 crc kubenswrapper[4921]: I1210 13:22:47.204684 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="02c8bc85-0cb6-49fb-8c84-625acc7de573" path="/var/lib/kubelet/pods/02c8bc85-0cb6-49fb-8c84-625acc7de573/volumes"
Dec 10 13:22:47 crc kubenswrapper[4921]: I1210 13:22:47.362997 4921 generic.go:334] "Generic (PLEG): container finished" podID="354355f7-6630-49a8-bdc5-5e875feecb7f" containerID="0c62b92cda59f27c8c407b11b3375247aad49e7f6c08bd5714c68b8dfabe123c" exitCode=0
Dec 10 13:22:47 crc kubenswrapper[4921]: I1210 13:22:47.363038 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" event={"ID":"354355f7-6630-49a8-bdc5-5e875feecb7f","Type":"ContainerDied","Data":"0c62b92cda59f27c8c407b11b3375247aad49e7f6c08bd5714c68b8dfabe123c"}
Dec 10 13:22:47 crc kubenswrapper[4921]: I1210 13:22:47.363069 4921 scope.go:117] "RemoveContainer" containerID="dded596c5a06c9f34bbe49927b6ba36b53f24e08c9e890c58e9f5dbee945fc5f"
Dec 10 13:22:47 crc kubenswrapper[4921]: I1210 13:22:47.364539 4921 scope.go:117] "RemoveContainer" containerID="0c62b92cda59f27c8c407b11b3375247aad49e7f6c08bd5714c68b8dfabe123c"
Dec 10 13:22:47 crc kubenswrapper[4921]: E1210 13:22:47.365656 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-vn2n6_openshift-machine-config-operator(354355f7-6630-49a8-bdc5-5e875feecb7f)\"" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" podUID="354355f7-6630-49a8-bdc5-5e875feecb7f"
Dec 10 13:22:49 crc kubenswrapper[4921]: I1210 13:22:49.444076 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-6m45g"]
Dec 10 13:22:49 crc kubenswrapper[4921]: E1210 13:22:49.444806 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="02c8bc85-0cb6-49fb-8c84-625acc7de573" containerName="registry-server"
Dec 10 13:22:49 crc kubenswrapper[4921]: I1210 13:22:49.444820 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="02c8bc85-0cb6-49fb-8c84-625acc7de573" containerName="registry-server"
Dec 10 13:22:49 crc kubenswrapper[4921]: E1210 13:22:49.444836 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="02c8bc85-0cb6-49fb-8c84-625acc7de573" containerName="extract-content"
Dec 10 13:22:49 crc kubenswrapper[4921]: I1210 13:22:49.444843 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="02c8bc85-0cb6-49fb-8c84-625acc7de573" containerName="extract-content"
Dec 10 13:22:49 crc kubenswrapper[4921]: E1210 13:22:49.444879 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="02c8bc85-0cb6-49fb-8c84-625acc7de573" containerName="extract-utilities"
Dec 10 13:22:49 crc kubenswrapper[4921]: I1210 13:22:49.444921 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="02c8bc85-0cb6-49fb-8c84-625acc7de573" containerName="extract-utilities"
Dec 10 13:22:49 crc kubenswrapper[4921]: I1210 13:22:49.445142 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="02c8bc85-0cb6-49fb-8c84-625acc7de573" containerName="registry-server"
Dec 10 13:22:49 crc kubenswrapper[4921]: I1210 13:22:49.446683 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6m45g"
Dec 10 13:22:49 crc kubenswrapper[4921]: I1210 13:22:49.469695 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-6m45g"]
Dec 10 13:22:49 crc kubenswrapper[4921]: I1210 13:22:49.556440 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a2c53703-0670-4275-a2bc-4ea3022fcd8e-catalog-content\") pod \"redhat-marketplace-6m45g\" (UID: \"a2c53703-0670-4275-a2bc-4ea3022fcd8e\") " pod="openshift-marketplace/redhat-marketplace-6m45g"
Dec 10 13:22:49 crc kubenswrapper[4921]: I1210 13:22:49.556519 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a2c53703-0670-4275-a2bc-4ea3022fcd8e-utilities\") pod \"redhat-marketplace-6m45g\" (UID: \"a2c53703-0670-4275-a2bc-4ea3022fcd8e\") " pod="openshift-marketplace/redhat-marketplace-6m45g"
Dec 10 13:22:49 crc kubenswrapper[4921]: I1210 13:22:49.556598 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d7qwj\" (UniqueName: \"kubernetes.io/projected/a2c53703-0670-4275-a2bc-4ea3022fcd8e-kube-api-access-d7qwj\") pod \"redhat-marketplace-6m45g\" (UID: \"a2c53703-0670-4275-a2bc-4ea3022fcd8e\") " pod="openshift-marketplace/redhat-marketplace-6m45g"
Dec 10 13:22:49 crc kubenswrapper[4921]: I1210 13:22:49.658321 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a2c53703-0670-4275-a2bc-4ea3022fcd8e-catalog-content\") pod \"redhat-marketplace-6m45g\" (UID: \"a2c53703-0670-4275-a2bc-4ea3022fcd8e\") " pod="openshift-marketplace/redhat-marketplace-6m45g"
Dec 10 13:22:49 crc kubenswrapper[4921]: I1210 13:22:49.658462 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a2c53703-0670-4275-a2bc-4ea3022fcd8e-utilities\") pod \"redhat-marketplace-6m45g\" (UID: \"a2c53703-0670-4275-a2bc-4ea3022fcd8e\") " pod="openshift-marketplace/redhat-marketplace-6m45g"
Dec 10 13:22:49 crc kubenswrapper[4921]: I1210 13:22:49.658533 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d7qwj\" (UniqueName: \"kubernetes.io/projected/a2c53703-0670-4275-a2bc-4ea3022fcd8e-kube-api-access-d7qwj\") pod \"redhat-marketplace-6m45g\" (UID: \"a2c53703-0670-4275-a2bc-4ea3022fcd8e\") " pod="openshift-marketplace/redhat-marketplace-6m45g"
Dec 10 13:22:49 crc kubenswrapper[4921]: I1210 13:22:49.658744 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a2c53703-0670-4275-a2bc-4ea3022fcd8e-catalog-content\") pod \"redhat-marketplace-6m45g\" (UID: \"a2c53703-0670-4275-a2bc-4ea3022fcd8e\") " pod="openshift-marketplace/redhat-marketplace-6m45g"
Dec 10 13:22:49 crc kubenswrapper[4921]: I1210 13:22:49.659191 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a2c53703-0670-4275-a2bc-4ea3022fcd8e-utilities\") pod \"redhat-marketplace-6m45g\" (UID: \"a2c53703-0670-4275-a2bc-4ea3022fcd8e\") " pod="openshift-marketplace/redhat-marketplace-6m45g"
Dec 10 13:22:49 crc kubenswrapper[4921]: I1210 13:22:49.683512 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d7qwj\" (UniqueName: \"kubernetes.io/projected/a2c53703-0670-4275-a2bc-4ea3022fcd8e-kube-api-access-d7qwj\") pod \"redhat-marketplace-6m45g\" (UID: \"a2c53703-0670-4275-a2bc-4ea3022fcd8e\") " pod="openshift-marketplace/redhat-marketplace-6m45g"
Dec 10 13:22:49 crc kubenswrapper[4921]: I1210 13:22:49.769462 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6m45g"
Dec 10 13:22:50 crc kubenswrapper[4921]: I1210 13:22:50.229074 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-6m45g"]
Dec 10 13:22:50 crc kubenswrapper[4921]: I1210 13:22:50.395539 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6m45g" event={"ID":"a2c53703-0670-4275-a2bc-4ea3022fcd8e","Type":"ContainerStarted","Data":"438fadd87b2a0c1226ebacbe58bd1ff8306056a92980203d422a6031ecf26f22"}
Dec 10 13:22:51 crc kubenswrapper[4921]: I1210 13:22:51.407095 4921 generic.go:334] "Generic (PLEG): container finished" podID="a2c53703-0670-4275-a2bc-4ea3022fcd8e" containerID="d4860af01f9d2efd2ce359f399a93c9bfdb4c2159913059afe4d117d36c72696" exitCode=0
Dec 10 13:22:51 crc kubenswrapper[4921]: I1210 13:22:51.407293 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6m45g" event={"ID":"a2c53703-0670-4275-a2bc-4ea3022fcd8e","Type":"ContainerDied","Data":"d4860af01f9d2efd2ce359f399a93c9bfdb4c2159913059afe4d117d36c72696"}
Dec 10 13:22:52 crc kubenswrapper[4921]: I1210 13:22:52.423287 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6m45g" event={"ID":"a2c53703-0670-4275-a2bc-4ea3022fcd8e","Type":"ContainerStarted","Data":"5f43052b16b496e52a2f81e7105d256b01c27fd4e560416a2d45e3a6f58bc72c"}
Dec 10 13:22:53 crc kubenswrapper[4921]: I1210 13:22:53.437291 4921 generic.go:334] "Generic (PLEG): container finished" podID="a2c53703-0670-4275-a2bc-4ea3022fcd8e" containerID="5f43052b16b496e52a2f81e7105d256b01c27fd4e560416a2d45e3a6f58bc72c" exitCode=0
Dec 10 13:22:53 crc kubenswrapper[4921]: I1210 13:22:53.437369 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6m45g" event={"ID":"a2c53703-0670-4275-a2bc-4ea3022fcd8e","Type":"ContainerDied","Data":"5f43052b16b496e52a2f81e7105d256b01c27fd4e560416a2d45e3a6f58bc72c"}
Dec 10 13:22:54 crc kubenswrapper[4921]: I1210 13:22:54.450520 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6m45g" event={"ID":"a2c53703-0670-4275-a2bc-4ea3022fcd8e","Type":"ContainerStarted","Data":"226e14c46688ae944db54b9f814d0f5878bb2ed00c2b16d47982bb4dc958f1a4"}
Dec 10 13:22:55 crc kubenswrapper[4921]: I1210 13:22:55.481738 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-6m45g" podStartSLOduration=3.957634405 podStartE2EDuration="6.481717646s" podCreationTimestamp="2025-12-10 13:22:49 +0000 UTC" firstStartedPulling="2025-12-10 13:22:51.409727536 +0000 UTC m=+1568.625949460" lastFinishedPulling="2025-12-10 13:22:53.933810777 +0000 UTC m=+1571.150032701" observedRunningTime="2025-12-10 13:22:55.471937053 +0000 UTC m=+1572.688158987" watchObservedRunningTime="2025-12-10 13:22:55.481717646 +0000 UTC m=+1572.697939580"
Dec 10 13:22:58 crc kubenswrapper[4921]: I1210 13:22:58.060910 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-lgf2r"]
Dec 10 13:22:58 crc kubenswrapper[4921]: I1210 13:22:58.067932 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-f6ee-account-create-update-w9mnd"]
Dec 10 13:22:58 crc kubenswrapper[4921]: I1210 13:22:58.077776 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-f7n4f"]
Dec 10 13:22:58 crc kubenswrapper[4921]: I1210 13:22:58.086264 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-9f3a-account-create-update-fl26p"]
Dec 10 13:22:58 crc kubenswrapper[4921]: I1210 13:22:58.102161 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-k8zlg"]
Dec 10 13:22:58 crc kubenswrapper[4921]: I1210 13:22:58.109774 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-6cc7-account-create-update-96qr2"]
Dec 10 13:22:58 crc kubenswrapper[4921]: I1210 13:22:58.117122 4921 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-f7n4f"]
Dec 10 13:22:58 crc kubenswrapper[4921]: I1210 13:22:58.126537 4921 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-k8zlg"]
Dec 10 13:22:58 crc kubenswrapper[4921]: I1210 13:22:58.136326 4921 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-f6ee-account-create-update-w9mnd"]
Dec 10 13:22:58 crc kubenswrapper[4921]: I1210 13:22:58.145554 4921 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-9f3a-account-create-update-fl26p"]
Dec 10 13:22:58 crc kubenswrapper[4921]: I1210 13:22:58.154522 4921 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-lgf2r"]
Dec 10 13:22:58 crc kubenswrapper[4921]: I1210 13:22:58.163629 4921 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-6cc7-account-create-update-96qr2"]
Dec 10 13:22:59 crc kubenswrapper[4921]: I1210 13:22:59.193539 4921 scope.go:117] "RemoveContainer" containerID="0c62b92cda59f27c8c407b11b3375247aad49e7f6c08bd5714c68b8dfabe123c"
Dec 10 13:22:59 crc kubenswrapper[4921]: E1210 13:22:59.193818 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-vn2n6_openshift-machine-config-operator(354355f7-6630-49a8-bdc5-5e875feecb7f)\"" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" podUID="354355f7-6630-49a8-bdc5-5e875feecb7f" Dec 10 13:22:59 crc kubenswrapper[4921]: I1210 13:22:59.206824 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2082e853-50b5-45c4-ba0a-d27cdd8e702a" path="/var/lib/kubelet/pods/2082e853-50b5-45c4-ba0a-d27cdd8e702a/volumes" Dec 10 13:22:59 crc kubenswrapper[4921]: I1210 13:22:59.207811 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="39c24196-2136-47b7-9aa5-372cf1de38db" path="/var/lib/kubelet/pods/39c24196-2136-47b7-9aa5-372cf1de38db/volumes" Dec 10 13:22:59 crc kubenswrapper[4921]: I1210 13:22:59.208588 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="67e2c579-4d8a-4ee6-bc94-45188bce58d4" path="/var/lib/kubelet/pods/67e2c579-4d8a-4ee6-bc94-45188bce58d4/volumes" Dec 10 13:22:59 crc kubenswrapper[4921]: I1210 13:22:59.209323 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="880b1430-d97c-499e-983f-6ec48060e96b" path="/var/lib/kubelet/pods/880b1430-d97c-499e-983f-6ec48060e96b/volumes" Dec 10 13:22:59 crc kubenswrapper[4921]: I1210 13:22:59.210799 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b0a10f7e-8028-455f-8db2-8c6e0b6e9d93" path="/var/lib/kubelet/pods/b0a10f7e-8028-455f-8db2-8c6e0b6e9d93/volumes" Dec 10 13:22:59 crc kubenswrapper[4921]: I1210 13:22:59.211560 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f6179226-7f67-4fad-95fd-7542f4abdcff" path="/var/lib/kubelet/pods/f6179226-7f67-4fad-95fd-7542f4abdcff/volumes" Dec 10 13:22:59 crc kubenswrapper[4921]: I1210 13:22:59.770057 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-6m45g" Dec 10 13:22:59 crc kubenswrapper[4921]: I1210 13:22:59.770098 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-6m45g" Dec 10 13:22:59 crc kubenswrapper[4921]: I1210 13:22:59.819921 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-6m45g" Dec 10 13:23:00 crc kubenswrapper[4921]: I1210 13:23:00.552085 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-6m45g" Dec 10 13:23:00 crc kubenswrapper[4921]: I1210 13:23:00.603797 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-6m45g"] Dec 10 13:23:02 crc kubenswrapper[4921]: I1210 13:23:02.525473 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-6m45g" podUID="a2c53703-0670-4275-a2bc-4ea3022fcd8e" containerName="registry-server" containerID="cri-o://226e14c46688ae944db54b9f814d0f5878bb2ed00c2b16d47982bb4dc958f1a4" gracePeriod=2 Dec 10 13:23:03 crc kubenswrapper[4921]: I1210 13:23:03.031257 4921 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6m45g" Dec 10 13:23:03 crc kubenswrapper[4921]: I1210 13:23:03.195537 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d7qwj\" (UniqueName: \"kubernetes.io/projected/a2c53703-0670-4275-a2bc-4ea3022fcd8e-kube-api-access-d7qwj\") pod \"a2c53703-0670-4275-a2bc-4ea3022fcd8e\" (UID: \"a2c53703-0670-4275-a2bc-4ea3022fcd8e\") " Dec 10 13:23:03 crc kubenswrapper[4921]: I1210 13:23:03.195632 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a2c53703-0670-4275-a2bc-4ea3022fcd8e-utilities\") pod \"a2c53703-0670-4275-a2bc-4ea3022fcd8e\" (UID: \"a2c53703-0670-4275-a2bc-4ea3022fcd8e\") " Dec 10 13:23:03 crc kubenswrapper[4921]: I1210 13:23:03.195732 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a2c53703-0670-4275-a2bc-4ea3022fcd8e-catalog-content\") pod \"a2c53703-0670-4275-a2bc-4ea3022fcd8e\" (UID: \"a2c53703-0670-4275-a2bc-4ea3022fcd8e\") " Dec 10 13:23:03 crc kubenswrapper[4921]: I1210 13:23:03.197420 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a2c53703-0670-4275-a2bc-4ea3022fcd8e-utilities" (OuterVolumeSpecName: "utilities") pod "a2c53703-0670-4275-a2bc-4ea3022fcd8e" (UID: "a2c53703-0670-4275-a2bc-4ea3022fcd8e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 13:23:03 crc kubenswrapper[4921]: I1210 13:23:03.207277 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a2c53703-0670-4275-a2bc-4ea3022fcd8e-kube-api-access-d7qwj" (OuterVolumeSpecName: "kube-api-access-d7qwj") pod "a2c53703-0670-4275-a2bc-4ea3022fcd8e" (UID: "a2c53703-0670-4275-a2bc-4ea3022fcd8e"). InnerVolumeSpecName "kube-api-access-d7qwj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 13:23:03 crc kubenswrapper[4921]: I1210 13:23:03.229331 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a2c53703-0670-4275-a2bc-4ea3022fcd8e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a2c53703-0670-4275-a2bc-4ea3022fcd8e" (UID: "a2c53703-0670-4275-a2bc-4ea3022fcd8e"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 13:23:03 crc kubenswrapper[4921]: I1210 13:23:03.298247 4921 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a2c53703-0670-4275-a2bc-4ea3022fcd8e-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 10 13:23:03 crc kubenswrapper[4921]: I1210 13:23:03.298284 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d7qwj\" (UniqueName: \"kubernetes.io/projected/a2c53703-0670-4275-a2bc-4ea3022fcd8e-kube-api-access-d7qwj\") on node \"crc\" DevicePath \"\"" Dec 10 13:23:03 crc kubenswrapper[4921]: I1210 13:23:03.298301 4921 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a2c53703-0670-4275-a2bc-4ea3022fcd8e-utilities\") on node \"crc\" DevicePath \"\"" Dec 10 13:23:03 crc kubenswrapper[4921]: I1210 13:23:03.538314 4921 generic.go:334] "Generic (PLEG): container finished" podID="a2c53703-0670-4275-a2bc-4ea3022fcd8e" containerID="226e14c46688ae944db54b9f814d0f5878bb2ed00c2b16d47982bb4dc958f1a4" exitCode=0 Dec 10 13:23:03 crc kubenswrapper[4921]: I1210 13:23:03.538356 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6m45g" event={"ID":"a2c53703-0670-4275-a2bc-4ea3022fcd8e","Type":"ContainerDied","Data":"226e14c46688ae944db54b9f814d0f5878bb2ed00c2b16d47982bb4dc958f1a4"} Dec 10 13:23:03 crc kubenswrapper[4921]: I1210 13:23:03.538397 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6m45g" event={"ID":"a2c53703-0670-4275-a2bc-4ea3022fcd8e","Type":"ContainerDied","Data":"438fadd87b2a0c1226ebacbe58bd1ff8306056a92980203d422a6031ecf26f22"} Dec 10 13:23:03 crc kubenswrapper[4921]: I1210 13:23:03.538420 4921 scope.go:117] "RemoveContainer" containerID="226e14c46688ae944db54b9f814d0f5878bb2ed00c2b16d47982bb4dc958f1a4" Dec 10 13:23:03 crc kubenswrapper[4921]: I1210 13:23:03.538442 4921 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6m45g" Dec 10 13:23:03 crc kubenswrapper[4921]: I1210 13:23:03.571260 4921 scope.go:117] "RemoveContainer" containerID="5f43052b16b496e52a2f81e7105d256b01c27fd4e560416a2d45e3a6f58bc72c" Dec 10 13:23:03 crc kubenswrapper[4921]: I1210 13:23:03.598043 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-6m45g"] Dec 10 13:23:03 crc kubenswrapper[4921]: I1210 13:23:03.626154 4921 scope.go:117] "RemoveContainer" containerID="d4860af01f9d2efd2ce359f399a93c9bfdb4c2159913059afe4d117d36c72696" Dec 10 13:23:03 crc kubenswrapper[4921]: I1210 13:23:03.629282 4921 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-6m45g"] Dec 10 13:23:03 crc kubenswrapper[4921]: I1210 13:23:03.660649 4921 scope.go:117] "RemoveContainer" containerID="226e14c46688ae944db54b9f814d0f5878bb2ed00c2b16d47982bb4dc958f1a4" Dec 10 13:23:03 crc kubenswrapper[4921]: E1210 13:23:03.661834 4921 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"226e14c46688ae944db54b9f814d0f5878bb2ed00c2b16d47982bb4dc958f1a4\": container with ID starting with 226e14c46688ae944db54b9f814d0f5878bb2ed00c2b16d47982bb4dc958f1a4 not found: ID does not exist" containerID="226e14c46688ae944db54b9f814d0f5878bb2ed00c2b16d47982bb4dc958f1a4" Dec 10 13:23:03 crc kubenswrapper[4921]: I1210 13:23:03.661882 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"226e14c46688ae944db54b9f814d0f5878bb2ed00c2b16d47982bb4dc958f1a4"} err="failed to get container status \"226e14c46688ae944db54b9f814d0f5878bb2ed00c2b16d47982bb4dc958f1a4\": rpc error: code = NotFound desc = could not find container \"226e14c46688ae944db54b9f814d0f5878bb2ed00c2b16d47982bb4dc958f1a4\": container with ID starting with 226e14c46688ae944db54b9f814d0f5878bb2ed00c2b16d47982bb4dc958f1a4 not found: ID does not exist" Dec 10 13:23:03 crc kubenswrapper[4921]: I1210 13:23:03.661901 4921 scope.go:117] "RemoveContainer" containerID="5f43052b16b496e52a2f81e7105d256b01c27fd4e560416a2d45e3a6f58bc72c" Dec 10 13:23:03 crc kubenswrapper[4921]: E1210 13:23:03.662294 4921 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5f43052b16b496e52a2f81e7105d256b01c27fd4e560416a2d45e3a6f58bc72c\": container with ID starting with 5f43052b16b496e52a2f81e7105d256b01c27fd4e560416a2d45e3a6f58bc72c not found: ID does not exist" containerID="5f43052b16b496e52a2f81e7105d256b01c27fd4e560416a2d45e3a6f58bc72c" Dec 10 13:23:03 crc kubenswrapper[4921]: I1210 13:23:03.662329 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5f43052b16b496e52a2f81e7105d256b01c27fd4e560416a2d45e3a6f58bc72c"} err="failed to get container status \"5f43052b16b496e52a2f81e7105d256b01c27fd4e560416a2d45e3a6f58bc72c\": rpc error: code = NotFound desc = could not find container \"5f43052b16b496e52a2f81e7105d256b01c27fd4e560416a2d45e3a6f58bc72c\": container with ID starting with 5f43052b16b496e52a2f81e7105d256b01c27fd4e560416a2d45e3a6f58bc72c not found: ID does not exist" Dec 10 13:23:03 crc kubenswrapper[4921]: I1210 13:23:03.662348 4921 scope.go:117] "RemoveContainer" containerID="d4860af01f9d2efd2ce359f399a93c9bfdb4c2159913059afe4d117d36c72696" Dec 10 13:23:03 crc kubenswrapper[4921]: E1210 13:23:03.662781 4921 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"d4860af01f9d2efd2ce359f399a93c9bfdb4c2159913059afe4d117d36c72696\": container with ID starting with d4860af01f9d2efd2ce359f399a93c9bfdb4c2159913059afe4d117d36c72696 not found: ID does not exist" containerID="d4860af01f9d2efd2ce359f399a93c9bfdb4c2159913059afe4d117d36c72696" Dec 10 13:23:03 crc kubenswrapper[4921]: I1210 13:23:03.662814 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d4860af01f9d2efd2ce359f399a93c9bfdb4c2159913059afe4d117d36c72696"} err="failed to get container status \"d4860af01f9d2efd2ce359f399a93c9bfdb4c2159913059afe4d117d36c72696\": rpc error: code = NotFound desc = could not find container \"d4860af01f9d2efd2ce359f399a93c9bfdb4c2159913059afe4d117d36c72696\": container with ID starting with d4860af01f9d2efd2ce359f399a93c9bfdb4c2159913059afe4d117d36c72696 not found: ID does not exist" Dec 10 13:23:05 crc kubenswrapper[4921]: I1210 13:23:05.212577 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a2c53703-0670-4275-a2bc-4ea3022fcd8e" path="/var/lib/kubelet/pods/a2c53703-0670-4275-a2bc-4ea3022fcd8e/volumes" Dec 10 13:23:12 crc kubenswrapper[4921]: I1210 13:23:12.611243 4921 generic.go:334] "Generic (PLEG): container finished" podID="c9c17643-da1b-4b05-acc8-08b43f910a68" containerID="0e254652b21904e358d80afcc4605da1edbdb3f7c94e9ac660a9db0383ace220" exitCode=0 Dec 10 13:23:12 crc kubenswrapper[4921]: I1210 13:23:12.611665 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-g4sdz" event={"ID":"c9c17643-da1b-4b05-acc8-08b43f910a68","Type":"ContainerDied","Data":"0e254652b21904e358d80afcc4605da1edbdb3f7c94e9ac660a9db0383ace220"} Dec 10 13:23:14 crc kubenswrapper[4921]: I1210 13:23:14.071456 4921 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-g4sdz" Dec 10 13:23:14 crc kubenswrapper[4921]: I1210 13:23:14.120977 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9c17643-da1b-4b05-acc8-08b43f910a68-bootstrap-combined-ca-bundle\") pod \"c9c17643-da1b-4b05-acc8-08b43f910a68\" (UID: \"c9c17643-da1b-4b05-acc8-08b43f910a68\") " Dec 10 13:23:14 crc kubenswrapper[4921]: I1210 13:23:14.121077 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c9c17643-da1b-4b05-acc8-08b43f910a68-inventory\") pod \"c9c17643-da1b-4b05-acc8-08b43f910a68\" (UID: \"c9c17643-da1b-4b05-acc8-08b43f910a68\") " Dec 10 13:23:14 crc kubenswrapper[4921]: I1210 13:23:14.121203 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c9c17643-da1b-4b05-acc8-08b43f910a68-ssh-key\") pod \"c9c17643-da1b-4b05-acc8-08b43f910a68\" (UID: \"c9c17643-da1b-4b05-acc8-08b43f910a68\") " Dec 10 13:23:14 crc kubenswrapper[4921]: I1210 13:23:14.121249 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tfgbn\" (UniqueName: \"kubernetes.io/projected/c9c17643-da1b-4b05-acc8-08b43f910a68-kube-api-access-tfgbn\") pod \"c9c17643-da1b-4b05-acc8-08b43f910a68\" (UID: \"c9c17643-da1b-4b05-acc8-08b43f910a68\") " Dec 10 13:23:14 crc kubenswrapper[4921]: I1210 13:23:14.126276 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c9c17643-da1b-4b05-acc8-08b43f910a68-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "c9c17643-da1b-4b05-acc8-08b43f910a68" (UID: "c9c17643-da1b-4b05-acc8-08b43f910a68"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:23:14 crc kubenswrapper[4921]: I1210 13:23:14.147344 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c9c17643-da1b-4b05-acc8-08b43f910a68-kube-api-access-tfgbn" (OuterVolumeSpecName: "kube-api-access-tfgbn") pod "c9c17643-da1b-4b05-acc8-08b43f910a68" (UID: "c9c17643-da1b-4b05-acc8-08b43f910a68"). InnerVolumeSpecName "kube-api-access-tfgbn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 13:23:14 crc kubenswrapper[4921]: I1210 13:23:14.154534 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c9c17643-da1b-4b05-acc8-08b43f910a68-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "c9c17643-da1b-4b05-acc8-08b43f910a68" (UID: "c9c17643-da1b-4b05-acc8-08b43f910a68"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:23:14 crc kubenswrapper[4921]: I1210 13:23:14.173878 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c9c17643-da1b-4b05-acc8-08b43f910a68-inventory" (OuterVolumeSpecName: "inventory") pod "c9c17643-da1b-4b05-acc8-08b43f910a68" (UID: "c9c17643-da1b-4b05-acc8-08b43f910a68"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:23:14 crc kubenswrapper[4921]: I1210 13:23:14.192291 4921 scope.go:117] "RemoveContainer" containerID="0c62b92cda59f27c8c407b11b3375247aad49e7f6c08bd5714c68b8dfabe123c" Dec 10 13:23:14 crc kubenswrapper[4921]: E1210 13:23:14.192665 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-vn2n6_openshift-machine-config-operator(354355f7-6630-49a8-bdc5-5e875feecb7f)\"" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" podUID="354355f7-6630-49a8-bdc5-5e875feecb7f" Dec 10 13:23:14 crc kubenswrapper[4921]: I1210 13:23:14.223224 4921 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c9c17643-da1b-4b05-acc8-08b43f910a68-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 10 13:23:14 crc kubenswrapper[4921]: I1210 13:23:14.223254 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tfgbn\" (UniqueName: \"kubernetes.io/projected/c9c17643-da1b-4b05-acc8-08b43f910a68-kube-api-access-tfgbn\") on node \"crc\" DevicePath \"\"" Dec 10 13:23:14 crc kubenswrapper[4921]: I1210 13:23:14.223268 4921 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9c17643-da1b-4b05-acc8-08b43f910a68-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 13:23:14 crc kubenswrapper[4921]: I1210 13:23:14.223280 4921 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c9c17643-da1b-4b05-acc8-08b43f910a68-inventory\") on node \"crc\" DevicePath \"\"" Dec 10 13:23:14 crc kubenswrapper[4921]: I1210 13:23:14.630031 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-g4sdz" event={"ID":"c9c17643-da1b-4b05-acc8-08b43f910a68","Type":"ContainerDied","Data":"1b79a44385b5e2823f341b482bbc4cb31291f8abdb8c4c4b1ffe226d9ce8584b"} Dec 10 13:23:14 crc kubenswrapper[4921]: I1210 13:23:14.630321 4921 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1b79a44385b5e2823f341b482bbc4cb31291f8abdb8c4c4b1ffe226d9ce8584b" Dec 10 13:23:14 crc kubenswrapper[4921]: I1210 13:23:14.631122 4921 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-g4sdz" Dec 10 13:23:14 crc kubenswrapper[4921]: I1210 13:23:14.731365 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-627br"] Dec 10 13:23:14 crc kubenswrapper[4921]: E1210 13:23:14.731933 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a2c53703-0670-4275-a2bc-4ea3022fcd8e" containerName="extract-content" Dec 10 13:23:14 crc kubenswrapper[4921]: I1210 13:23:14.732028 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="a2c53703-0670-4275-a2bc-4ea3022fcd8e" containerName="extract-content" Dec 10 13:23:14 crc kubenswrapper[4921]: E1210 13:23:14.732118 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a2c53703-0670-4275-a2bc-4ea3022fcd8e" containerName="registry-server" Dec 10 13:23:14 crc kubenswrapper[4921]: I1210 13:23:14.732178 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="a2c53703-0670-4275-a2bc-4ea3022fcd8e" containerName="registry-server" Dec 10 13:23:14 crc kubenswrapper[4921]: E1210 13:23:14.732239 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c9c17643-da1b-4b05-acc8-08b43f910a68" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Dec 10 13:23:14 crc kubenswrapper[4921]: I1210 13:23:14.732294 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="c9c17643-da1b-4b05-acc8-08b43f910a68" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Dec 10 13:23:14 crc kubenswrapper[4921]: E1210 13:23:14.732359 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a2c53703-0670-4275-a2bc-4ea3022fcd8e" containerName="extract-utilities" Dec 10 13:23:14 crc kubenswrapper[4921]: I1210 13:23:14.732431 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="a2c53703-0670-4275-a2bc-4ea3022fcd8e" containerName="extract-utilities" Dec 10 13:23:14 crc kubenswrapper[4921]: I1210 13:23:14.732666 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="a2c53703-0670-4275-a2bc-4ea3022fcd8e" containerName="registry-server" Dec 10 13:23:14 crc kubenswrapper[4921]: I1210 13:23:14.732744 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="c9c17643-da1b-4b05-acc8-08b43f910a68" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Dec 10 13:23:14 crc kubenswrapper[4921]: I1210 13:23:14.733352 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-627br" Dec 10 13:23:14 crc kubenswrapper[4921]: I1210 13:23:14.738442 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 10 13:23:14 crc kubenswrapper[4921]: I1210 13:23:14.738882 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-tgn2z" Dec 10 13:23:14 crc kubenswrapper[4921]: I1210 13:23:14.739086 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 10 13:23:14 crc kubenswrapper[4921]: I1210 13:23:14.739116 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 10 13:23:14 crc kubenswrapper[4921]: I1210 13:23:14.750747 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-627br"] Dec 10 13:23:14 crc kubenswrapper[4921]: I1210 13:23:14.833641 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b582c\" (UniqueName: \"kubernetes.io/projected/6d9e8b54-936c-4a54-aee9-2e4b0c6d8ed7-kube-api-access-b582c\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-627br\" (UID: \"6d9e8b54-936c-4a54-aee9-2e4b0c6d8ed7\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-627br" Dec 10 13:23:14 crc kubenswrapper[4921]: I1210 13:23:14.833719 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6d9e8b54-936c-4a54-aee9-2e4b0c6d8ed7-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-627br\" (UID: \"6d9e8b54-936c-4a54-aee9-2e4b0c6d8ed7\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-627br" Dec 10 13:23:14 crc kubenswrapper[4921]: I1210 13:23:14.833789 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6d9e8b54-936c-4a54-aee9-2e4b0c6d8ed7-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-627br\" (UID: \"6d9e8b54-936c-4a54-aee9-2e4b0c6d8ed7\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-627br" Dec 10 13:23:14 crc kubenswrapper[4921]: I1210 13:23:14.934753 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6d9e8b54-936c-4a54-aee9-2e4b0c6d8ed7-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-627br\" (UID: \"6d9e8b54-936c-4a54-aee9-2e4b0c6d8ed7\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-627br" Dec 10 13:23:14 crc kubenswrapper[4921]: I1210 13:23:14.934825 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6d9e8b54-936c-4a54-aee9-2e4b0c6d8ed7-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-627br\" (UID: \"6d9e8b54-936c-4a54-aee9-2e4b0c6d8ed7\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-627br" Dec 10 13:23:14 crc kubenswrapper[4921]: I1210 13:23:14.934915 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b582c\" (UniqueName: \"kubernetes.io/projected/6d9e8b54-936c-4a54-aee9-2e4b0c6d8ed7-kube-api-access-b582c\") 
pod \"configure-network-edpm-deployment-openstack-edpm-ipam-627br\" (UID: \"6d9e8b54-936c-4a54-aee9-2e4b0c6d8ed7\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-627br" Dec 10 13:23:14 crc kubenswrapper[4921]: I1210 13:23:14.938882 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6d9e8b54-936c-4a54-aee9-2e4b0c6d8ed7-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-627br\" (UID: \"6d9e8b54-936c-4a54-aee9-2e4b0c6d8ed7\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-627br" Dec 10 13:23:14 crc kubenswrapper[4921]: I1210 13:23:14.939061 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6d9e8b54-936c-4a54-aee9-2e4b0c6d8ed7-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-627br\" (UID: \"6d9e8b54-936c-4a54-aee9-2e4b0c6d8ed7\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-627br" Dec 10 13:23:14 crc kubenswrapper[4921]: I1210 13:23:14.955789 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b582c\" (UniqueName: \"kubernetes.io/projected/6d9e8b54-936c-4a54-aee9-2e4b0c6d8ed7-kube-api-access-b582c\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-627br\" (UID: \"6d9e8b54-936c-4a54-aee9-2e4b0c6d8ed7\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-627br" Dec 10 13:23:15 crc kubenswrapper[4921]: I1210 13:23:15.071364 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-627br" Dec 10 13:23:15 crc kubenswrapper[4921]: I1210 13:23:15.630724 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-627br"] Dec 10 13:23:16 crc kubenswrapper[4921]: I1210 13:23:16.660586 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-627br" event={"ID":"6d9e8b54-936c-4a54-aee9-2e4b0c6d8ed7","Type":"ContainerStarted","Data":"5b13044877218ed12e79761516423474b45d6c2990cefa0bd6f6fe47acc6735e"} Dec 10 13:23:17 crc kubenswrapper[4921]: I1210 13:23:17.679281 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-627br" event={"ID":"6d9e8b54-936c-4a54-aee9-2e4b0c6d8ed7","Type":"ContainerStarted","Data":"2d1488ad361ce8310e11fbffac6fe9fd9e4c72defff1c46ec9d836430983b52d"} Dec 10 13:23:17 crc kubenswrapper[4921]: I1210 13:23:17.698816 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-627br" podStartSLOduration=2.923583011 podStartE2EDuration="3.698802145s" podCreationTimestamp="2025-12-10 13:23:14 +0000 UTC" firstStartedPulling="2025-12-10 13:23:15.640527287 +0000 UTC m=+1592.856749211" lastFinishedPulling="2025-12-10 13:23:16.415746421 +0000 UTC m=+1593.631968345" observedRunningTime="2025-12-10 13:23:17.697703866 +0000 UTC m=+1594.913925810" watchObservedRunningTime="2025-12-10 13:23:17.698802145 +0000 UTC m=+1594.915024069" Dec 10 13:23:28 crc kubenswrapper[4921]: I1210 13:23:28.193107 4921 scope.go:117] "RemoveContainer" containerID="0c62b92cda59f27c8c407b11b3375247aad49e7f6c08bd5714c68b8dfabe123c" Dec 10 13:23:28 crc kubenswrapper[4921]: E1210 13:23:28.194053 4921 pod_workers.go:1301] "Error syncing pod, 
skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-vn2n6_openshift-machine-config-operator(354355f7-6630-49a8-bdc5-5e875feecb7f)\"" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" podUID="354355f7-6630-49a8-bdc5-5e875feecb7f" Dec 10 13:23:36 crc kubenswrapper[4921]: I1210 13:23:36.042938 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-kkx6d"] Dec 10 13:23:36 crc kubenswrapper[4921]: I1210 13:23:36.052526 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-mgjsp"] Dec 10 13:23:36 crc kubenswrapper[4921]: I1210 13:23:36.059686 4921 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-kkx6d"] Dec 10 13:23:36 crc kubenswrapper[4921]: I1210 13:23:36.066822 4921 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-mgjsp"] Dec 10 13:23:37 crc kubenswrapper[4921]: I1210 13:23:37.210244 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="13330318-8c10-4f9c-9e04-1c4e6005d84e" path="/var/lib/kubelet/pods/13330318-8c10-4f9c-9e04-1c4e6005d84e/volumes" Dec 10 13:23:37 crc kubenswrapper[4921]: I1210 13:23:37.211976 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f13cffa1-660f-45b0-9207-39d7059f1ec8" path="/var/lib/kubelet/pods/f13cffa1-660f-45b0-9207-39d7059f1ec8/volumes" Dec 10 13:23:40 crc kubenswrapper[4921]: I1210 13:23:40.052436 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-526f-account-create-update-76g79"] Dec 10 13:23:40 crc kubenswrapper[4921]: I1210 13:23:40.061123 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-3ad1-account-create-update-mvnfv"] Dec 10 13:23:40 crc kubenswrapper[4921]: I1210 13:23:40.073608 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-74qkt"] Dec 10 13:23:40 crc kubenswrapper[4921]: I1210 13:23:40.086693 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-5836-account-create-update-98fk7"] Dec 10 13:23:40 crc kubenswrapper[4921]: I1210 13:23:40.096504 4921 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-526f-account-create-update-76g79"] Dec 10 13:23:40 crc kubenswrapper[4921]: I1210 13:23:40.103403 4921 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-5836-account-create-update-98fk7"] Dec 10 13:23:40 crc kubenswrapper[4921]: I1210 13:23:40.124428 4921 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-3ad1-account-create-update-mvnfv"] Dec 10 13:23:40 crc kubenswrapper[4921]: I1210 13:23:40.131584 4921 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-74qkt"] Dec 10 13:23:41 crc kubenswrapper[4921]: I1210 13:23:41.205484 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="544608c5-4a34-46c5-9f36-7bd4cf7c3eb3" path="/var/lib/kubelet/pods/544608c5-4a34-46c5-9f36-7bd4cf7c3eb3/volumes" Dec 10 13:23:41 crc kubenswrapper[4921]: I1210 13:23:41.206179 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="90748fc1-2cb1-4e08-b531-8c835eaded23" path="/var/lib/kubelet/pods/90748fc1-2cb1-4e08-b531-8c835eaded23/volumes" Dec 10 13:23:41 crc kubenswrapper[4921]: I1210 13:23:41.206846 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="9e1315d0-712b-4119-8569-304e3bcdf8e6" path="/var/lib/kubelet/pods/9e1315d0-712b-4119-8569-304e3bcdf8e6/volumes" Dec 10 13:23:41 crc kubenswrapper[4921]: I1210 13:23:41.207543 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d834ef72-8c06-49b5-b966-8114d30fe9de" path="/var/lib/kubelet/pods/d834ef72-8c06-49b5-b966-8114d30fe9de/volumes" Dec 10 13:23:42 crc kubenswrapper[4921]: I1210 13:23:42.029462 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-zc8hg"] Dec 10 13:23:42 crc kubenswrapper[4921]: I1210 13:23:42.037186 4921 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-zc8hg"] Dec 10 13:23:43 crc kubenswrapper[4921]: I1210 13:23:43.207006 4921 scope.go:117] "RemoveContainer" containerID="0c62b92cda59f27c8c407b11b3375247aad49e7f6c08bd5714c68b8dfabe123c" Dec 10 13:23:43 crc kubenswrapper[4921]: E1210 13:23:43.207267 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-vn2n6_openshift-machine-config-operator(354355f7-6630-49a8-bdc5-5e875feecb7f)\"" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" podUID="354355f7-6630-49a8-bdc5-5e875feecb7f" Dec 10 13:23:43 crc kubenswrapper[4921]: I1210 13:23:43.207539 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="99af9def-7de6-4bab-98f9-890433c3836e" path="/var/lib/kubelet/pods/99af9def-7de6-4bab-98f9-890433c3836e/volumes" Dec 10 13:23:44 crc kubenswrapper[4921]: I1210 13:23:44.651950 4921 scope.go:117] "RemoveContainer" containerID="6414d2ef57e73c050f6fdf5fd98f79262cd0ba793231fe2ff9a4ed4fde465179" Dec 10 13:23:44 crc kubenswrapper[4921]: I1210 13:23:44.690124 4921 scope.go:117] "RemoveContainer" containerID="9f99ecbc9cfbe13cb75b13ca6e15739cd31ea355853b4669d90754b200dd451a" Dec 10 13:23:44 crc kubenswrapper[4921]: I1210 13:23:44.726755 4921 scope.go:117] "RemoveContainer" containerID="cf0b5f250cabecaae51f6158f1a69de27f852a281c6cb357f5fe6b988258181e" Dec 10 13:23:44 crc kubenswrapper[4921]: I1210 13:23:44.767300 4921 scope.go:117] "RemoveContainer" containerID="72f68979cfe62c4ddd72ec2b3bf1bca68c8f49fa84227930a085ec178eb09e29" Dec 10 13:23:44 crc kubenswrapper[4921]: I1210 13:23:44.806674 4921 scope.go:117] "RemoveContainer" containerID="8b4c6c48222767cc4b0baf809878d54b028d221e4544a96cf1e56740fc07cfd5" Dec 10 13:23:44 crc kubenswrapper[4921]: I1210 13:23:44.853395 4921 scope.go:117] "RemoveContainer" containerID="1452b30e66dc68cadd0df3afe5303618cd96b0d67003f01ee981cbb94ee9bca3" Dec 10 13:23:44 crc kubenswrapper[4921]: I1210 13:23:44.898100 4921 scope.go:117] "RemoveContainer" containerID="ea141416d8a94280d9f965754fc53c9f3d2851ea26ac3998de3c201749934a2e" Dec 10 13:23:44 crc kubenswrapper[4921]: I1210 13:23:44.917216 4921 scope.go:117] "RemoveContainer" containerID="60c44768682bdd870ce40fe60e60103dcd9f25ccbd8d215f4f905f0449f6b6e9" Dec 10 13:23:44 crc kubenswrapper[4921]: I1210 13:23:44.939018 4921 scope.go:117] "RemoveContainer" containerID="eec60dd7636ad4df7ef7448b8acb9de26e94908fd8f33c38c1907ed438a5d547" Dec 10 13:23:44 crc kubenswrapper[4921]: I1210 13:23:44.963790 4921 scope.go:117] "RemoveContainer" containerID="b8ce645bdc2eda66611a81c5f2da203a7481c58d530931f162a583b8bfa2837b" Dec 10 13:23:44 crc kubenswrapper[4921]: I1210 13:23:44.986921 4921 scope.go:117] "RemoveContainer" 
containerID="f3ae60355260c0e593ce3bd90588adcbd2a4a48b5cbc752b5896e20af1e67300" Dec 10 13:23:45 crc kubenswrapper[4921]: I1210 13:23:45.035460 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-4hrfk"] Dec 10 13:23:45 crc kubenswrapper[4921]: I1210 13:23:45.053634 4921 scope.go:117] "RemoveContainer" containerID="712ab96d0e5a8a7b62fe495759c83a77161905eb3dd495f54a2326e27c7a7b37" Dec 10 13:23:45 crc kubenswrapper[4921]: I1210 13:23:45.056303 4921 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-4hrfk"] Dec 10 13:23:45 crc kubenswrapper[4921]: I1210 13:23:45.073411 4921 scope.go:117] "RemoveContainer" containerID="bf80757d248f1b911eea2282899dc08284b186a27dc3cbf291d7f89a7958e70e" Dec 10 13:23:45 crc kubenswrapper[4921]: I1210 13:23:45.094167 4921 scope.go:117] "RemoveContainer" containerID="09f3e93d6f5f4c2cd2b9d1dbcad906f238b66627431a76f79cc46d8967d0b090" Dec 10 13:23:45 crc kubenswrapper[4921]: I1210 13:23:45.116036 4921 scope.go:117] "RemoveContainer" containerID="b4b4a895c1597e20c0953747b4ee7a22b38b75c00abb3664de38febf2b0c1542" Dec 10 13:23:45 crc kubenswrapper[4921]: I1210 13:23:45.205932 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ebe4e94c-2099-452f-ac61-828e372f18a1" path="/var/lib/kubelet/pods/ebe4e94c-2099-452f-ac61-828e372f18a1/volumes" Dec 10 13:23:58 crc kubenswrapper[4921]: I1210 13:23:58.193883 4921 scope.go:117] "RemoveContainer" containerID="0c62b92cda59f27c8c407b11b3375247aad49e7f6c08bd5714c68b8dfabe123c" Dec 10 13:23:58 crc kubenswrapper[4921]: E1210 13:23:58.194767 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-vn2n6_openshift-machine-config-operator(354355f7-6630-49a8-bdc5-5e875feecb7f)\"" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" podUID="354355f7-6630-49a8-bdc5-5e875feecb7f" Dec 10 13:24:08 crc kubenswrapper[4921]: I1210 13:24:08.058332 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-fm45n"] Dec 10 13:24:08 crc kubenswrapper[4921]: I1210 13:24:08.069570 4921 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-fm45n"] Dec 10 13:24:09 crc kubenswrapper[4921]: I1210 13:24:09.205457 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd9bf4e6-e6d1-4df4-ab06-5b7bc6f88473" path="/var/lib/kubelet/pods/cd9bf4e6-e6d1-4df4-ab06-5b7bc6f88473/volumes" Dec 10 13:24:11 crc kubenswrapper[4921]: I1210 13:24:11.194144 4921 scope.go:117] "RemoveContainer" containerID="0c62b92cda59f27c8c407b11b3375247aad49e7f6c08bd5714c68b8dfabe123c" Dec 10 13:24:11 crc kubenswrapper[4921]: E1210 13:24:11.194573 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-vn2n6_openshift-machine-config-operator(354355f7-6630-49a8-bdc5-5e875feecb7f)\"" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" podUID="354355f7-6630-49a8-bdc5-5e875feecb7f" Dec 10 13:24:20 crc kubenswrapper[4921]: I1210 13:24:20.035647 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-2ztsg"] Dec 10 13:24:20 crc kubenswrapper[4921]: I1210 13:24:20.043996 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/barbican-db-sync-mlljr"] Dec 10 13:24:20 crc kubenswrapper[4921]: I1210 13:24:20.051664 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-w9cf8"] Dec 10 13:24:20 crc kubenswrapper[4921]: I1210 13:24:20.060568 4921 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-2ztsg"] Dec 10 13:24:20 crc kubenswrapper[4921]: I1210 13:24:20.069139 4921 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-w9cf8"] Dec 10 13:24:20 crc kubenswrapper[4921]: I1210 13:24:20.077616 4921 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-mlljr"] Dec 10 13:24:21 crc kubenswrapper[4921]: I1210 13:24:21.205697 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7efd750c-824b-443b-a0e3-dc57a14e928c" path="/var/lib/kubelet/pods/7efd750c-824b-443b-a0e3-dc57a14e928c/volumes" Dec 10 13:24:21 crc kubenswrapper[4921]: I1210 13:24:21.206462 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="99b2821d-2229-4c9e-8b01-699b20d6d65e" path="/var/lib/kubelet/pods/99b2821d-2229-4c9e-8b01-699b20d6d65e/volumes" Dec 10 13:24:21 crc kubenswrapper[4921]: I1210 13:24:21.207145 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f1e88d7b-1b52-4f84-9648-61b3fc78a4f5" path="/var/lib/kubelet/pods/f1e88d7b-1b52-4f84-9648-61b3fc78a4f5/volumes" Dec 10 13:24:25 crc kubenswrapper[4921]: I1210 13:24:25.192472 4921 scope.go:117] "RemoveContainer" containerID="0c62b92cda59f27c8c407b11b3375247aad49e7f6c08bd5714c68b8dfabe123c" Dec 10 13:24:25 crc kubenswrapper[4921]: E1210 13:24:25.193277 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-vn2n6_openshift-machine-config-operator(354355f7-6630-49a8-bdc5-5e875feecb7f)\"" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" podUID="354355f7-6630-49a8-bdc5-5e875feecb7f" Dec 10 13:24:39 crc kubenswrapper[4921]: I1210 13:24:39.192748 4921 scope.go:117] "RemoveContainer" containerID="0c62b92cda59f27c8c407b11b3375247aad49e7f6c08bd5714c68b8dfabe123c" Dec 10 13:24:39 crc kubenswrapper[4921]: E1210 13:24:39.193508 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-vn2n6_openshift-machine-config-operator(354355f7-6630-49a8-bdc5-5e875feecb7f)\"" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" podUID="354355f7-6630-49a8-bdc5-5e875feecb7f" Dec 10 13:24:44 crc kubenswrapper[4921]: I1210 13:24:44.448079 4921 generic.go:334] "Generic (PLEG): container finished" podID="6d9e8b54-936c-4a54-aee9-2e4b0c6d8ed7" containerID="2d1488ad361ce8310e11fbffac6fe9fd9e4c72defff1c46ec9d836430983b52d" exitCode=0 Dec 10 13:24:44 crc kubenswrapper[4921]: I1210 13:24:44.448156 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-627br" event={"ID":"6d9e8b54-936c-4a54-aee9-2e4b0c6d8ed7","Type":"ContainerDied","Data":"2d1488ad361ce8310e11fbffac6fe9fd9e4c72defff1c46ec9d836430983b52d"} Dec 10 13:24:45 crc kubenswrapper[4921]: I1210 13:24:45.454647 4921 scope.go:117] "RemoveContainer" containerID="81fe7a13737ffb33b1b7cc13699d92e3ae5722db7489dceacb430555d0947771" Dec 10 
13:24:45 crc kubenswrapper[4921]: I1210 13:24:45.485454 4921 scope.go:117] "RemoveContainer" containerID="cab9ecb846d4d5646db806729a6cee52da95c7302f0fa93b4a45ccde471ba547" Dec 10 13:24:45 crc kubenswrapper[4921]: I1210 13:24:45.552104 4921 scope.go:117] "RemoveContainer" containerID="0248f3657c74122a43bc2b90085029a7b455e9e0f0c3fdf0fbb3081d883cdfc1" Dec 10 13:24:45 crc kubenswrapper[4921]: I1210 13:24:45.592461 4921 scope.go:117] "RemoveContainer" containerID="914102e9bbdf78748893ba40fbf1646a5cd324532d8bfac706c999cf0f611174" Dec 10 13:24:45 crc kubenswrapper[4921]: I1210 13:24:45.660573 4921 scope.go:117] "RemoveContainer" containerID="89b36f5012a2e6af1c4107db3f04f3989e759a873cfe37fdd7729e56d6414781" Dec 10 13:24:45 crc kubenswrapper[4921]: I1210 13:24:45.770023 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-627br" Dec 10 13:24:45 crc kubenswrapper[4921]: I1210 13:24:45.950535 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6d9e8b54-936c-4a54-aee9-2e4b0c6d8ed7-inventory\") pod \"6d9e8b54-936c-4a54-aee9-2e4b0c6d8ed7\" (UID: \"6d9e8b54-936c-4a54-aee9-2e4b0c6d8ed7\") " Dec 10 13:24:45 crc kubenswrapper[4921]: I1210 13:24:45.950804 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b582c\" (UniqueName: \"kubernetes.io/projected/6d9e8b54-936c-4a54-aee9-2e4b0c6d8ed7-kube-api-access-b582c\") pod \"6d9e8b54-936c-4a54-aee9-2e4b0c6d8ed7\" (UID: \"6d9e8b54-936c-4a54-aee9-2e4b0c6d8ed7\") " Dec 10 13:24:45 crc kubenswrapper[4921]: I1210 13:24:45.950940 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6d9e8b54-936c-4a54-aee9-2e4b0c6d8ed7-ssh-key\") pod \"6d9e8b54-936c-4a54-aee9-2e4b0c6d8ed7\" (UID: \"6d9e8b54-936c-4a54-aee9-2e4b0c6d8ed7\") " Dec 10 13:24:45 crc kubenswrapper[4921]: I1210 13:24:45.956209 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6d9e8b54-936c-4a54-aee9-2e4b0c6d8ed7-kube-api-access-b582c" (OuterVolumeSpecName: "kube-api-access-b582c") pod "6d9e8b54-936c-4a54-aee9-2e4b0c6d8ed7" (UID: "6d9e8b54-936c-4a54-aee9-2e4b0c6d8ed7"). InnerVolumeSpecName "kube-api-access-b582c". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 13:24:45 crc kubenswrapper[4921]: I1210 13:24:45.975541 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6d9e8b54-936c-4a54-aee9-2e4b0c6d8ed7-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "6d9e8b54-936c-4a54-aee9-2e4b0c6d8ed7" (UID: "6d9e8b54-936c-4a54-aee9-2e4b0c6d8ed7"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:24:45 crc kubenswrapper[4921]: I1210 13:24:45.977022 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6d9e8b54-936c-4a54-aee9-2e4b0c6d8ed7-inventory" (OuterVolumeSpecName: "inventory") pod "6d9e8b54-936c-4a54-aee9-2e4b0c6d8ed7" (UID: "6d9e8b54-936c-4a54-aee9-2e4b0c6d8ed7"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:24:46 crc kubenswrapper[4921]: I1210 13:24:46.053385 4921 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6d9e8b54-936c-4a54-aee9-2e4b0c6d8ed7-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 10 13:24:46 crc kubenswrapper[4921]: I1210 13:24:46.053453 4921 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6d9e8b54-936c-4a54-aee9-2e4b0c6d8ed7-inventory\") on node \"crc\" DevicePath \"\"" Dec 10 13:24:46 crc kubenswrapper[4921]: I1210 13:24:46.053475 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b582c\" (UniqueName: \"kubernetes.io/projected/6d9e8b54-936c-4a54-aee9-2e4b0c6d8ed7-kube-api-access-b582c\") on node \"crc\" DevicePath \"\"" Dec 10 13:24:46 crc kubenswrapper[4921]: I1210 13:24:46.471168 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-627br" event={"ID":"6d9e8b54-936c-4a54-aee9-2e4b0c6d8ed7","Type":"ContainerDied","Data":"5b13044877218ed12e79761516423474b45d6c2990cefa0bd6f6fe47acc6735e"} Dec 10 13:24:46 crc kubenswrapper[4921]: I1210 13:24:46.472332 4921 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5b13044877218ed12e79761516423474b45d6c2990cefa0bd6f6fe47acc6735e" Dec 10 13:24:46 crc kubenswrapper[4921]: I1210 13:24:46.471506 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-627br" Dec 10 13:24:46 crc kubenswrapper[4921]: I1210 13:24:46.566004 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-whdnn"] Dec 10 13:24:46 crc kubenswrapper[4921]: E1210 13:24:46.566419 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6d9e8b54-936c-4a54-aee9-2e4b0c6d8ed7" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Dec 10 13:24:46 crc kubenswrapper[4921]: I1210 13:24:46.566441 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="6d9e8b54-936c-4a54-aee9-2e4b0c6d8ed7" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Dec 10 13:24:46 crc kubenswrapper[4921]: I1210 13:24:46.566674 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="6d9e8b54-936c-4a54-aee9-2e4b0c6d8ed7" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Dec 10 13:24:46 crc kubenswrapper[4921]: I1210 13:24:46.567722 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-whdnn" Dec 10 13:24:46 crc kubenswrapper[4921]: I1210 13:24:46.571998 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 10 13:24:46 crc kubenswrapper[4921]: I1210 13:24:46.572184 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-tgn2z" Dec 10 13:24:46 crc kubenswrapper[4921]: I1210 13:24:46.572592 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 10 13:24:46 crc kubenswrapper[4921]: I1210 13:24:46.572816 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 10 13:24:46 crc kubenswrapper[4921]: I1210 13:24:46.573888 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-whdnn"] Dec 10 13:24:46 crc kubenswrapper[4921]: I1210 13:24:46.765625 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/05f95f16-6d6b-4820-bdc1-956651c51519-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-whdnn\" (UID: \"05f95f16-6d6b-4820-bdc1-956651c51519\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-whdnn" Dec 10 13:24:46 crc kubenswrapper[4921]: I1210 13:24:46.765766 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-djst4\" (UniqueName: \"kubernetes.io/projected/05f95f16-6d6b-4820-bdc1-956651c51519-kube-api-access-djst4\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-whdnn\" (UID: \"05f95f16-6d6b-4820-bdc1-956651c51519\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-whdnn" Dec 10 13:24:46 crc kubenswrapper[4921]: I1210 13:24:46.765901 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/05f95f16-6d6b-4820-bdc1-956651c51519-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-whdnn\" (UID: \"05f95f16-6d6b-4820-bdc1-956651c51519\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-whdnn" Dec 10 13:24:46 crc kubenswrapper[4921]: I1210 13:24:46.867649 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/05f95f16-6d6b-4820-bdc1-956651c51519-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-whdnn\" (UID: \"05f95f16-6d6b-4820-bdc1-956651c51519\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-whdnn" Dec 10 13:24:46 crc kubenswrapper[4921]: I1210 13:24:46.867726 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/05f95f16-6d6b-4820-bdc1-956651c51519-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-whdnn\" (UID: \"05f95f16-6d6b-4820-bdc1-956651c51519\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-whdnn" Dec 10 13:24:46 crc kubenswrapper[4921]: I1210 13:24:46.867824 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-djst4\" (UniqueName: \"kubernetes.io/projected/05f95f16-6d6b-4820-bdc1-956651c51519-kube-api-access-djst4\") pod 
\"validate-network-edpm-deployment-openstack-edpm-ipam-whdnn\" (UID: \"05f95f16-6d6b-4820-bdc1-956651c51519\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-whdnn" Dec 10 13:24:46 crc kubenswrapper[4921]: I1210 13:24:46.882353 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/05f95f16-6d6b-4820-bdc1-956651c51519-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-whdnn\" (UID: \"05f95f16-6d6b-4820-bdc1-956651c51519\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-whdnn" Dec 10 13:24:46 crc kubenswrapper[4921]: I1210 13:24:46.882436 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/05f95f16-6d6b-4820-bdc1-956651c51519-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-whdnn\" (UID: \"05f95f16-6d6b-4820-bdc1-956651c51519\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-whdnn" Dec 10 13:24:46 crc kubenswrapper[4921]: I1210 13:24:46.899145 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-djst4\" (UniqueName: \"kubernetes.io/projected/05f95f16-6d6b-4820-bdc1-956651c51519-kube-api-access-djst4\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-whdnn\" (UID: \"05f95f16-6d6b-4820-bdc1-956651c51519\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-whdnn" Dec 10 13:24:46 crc kubenswrapper[4921]: I1210 13:24:46.921988 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-whdnn" Dec 10 13:24:47 crc kubenswrapper[4921]: I1210 13:24:47.041490 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-nqvc7"] Dec 10 13:24:47 crc kubenswrapper[4921]: I1210 13:24:47.055427 4921 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-nqvc7"] Dec 10 13:24:47 crc kubenswrapper[4921]: I1210 13:24:47.206524 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="049815fe-e8f8-45c6-9360-d2d331fa8cd3" path="/var/lib/kubelet/pods/049815fe-e8f8-45c6-9360-d2d331fa8cd3/volumes" Dec 10 13:24:48 crc kubenswrapper[4921]: I1210 13:24:47.504739 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-whdnn"] Dec 10 13:24:48 crc kubenswrapper[4921]: I1210 13:24:48.487717 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-whdnn" event={"ID":"05f95f16-6d6b-4820-bdc1-956651c51519","Type":"ContainerStarted","Data":"72975f73a01492ef5c6fea2d8f62440d1996e2ee579781949d6d28e38340bd01"} Dec 10 13:24:48 crc kubenswrapper[4921]: I1210 13:24:48.487755 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-whdnn" event={"ID":"05f95f16-6d6b-4820-bdc1-956651c51519","Type":"ContainerStarted","Data":"62521af19296674911a6a74e3835556887a17392e44acf5a40200f9fe14a5905"} Dec 10 13:24:48 crc kubenswrapper[4921]: I1210 13:24:48.504766 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-whdnn" podStartSLOduration=2.041786425 podStartE2EDuration="2.504747553s" podCreationTimestamp="2025-12-10 13:24:46 +0000 UTC" firstStartedPulling="2025-12-10 13:24:47.52036382 +0000 UTC 
m=+1684.736585754" lastFinishedPulling="2025-12-10 13:24:47.983324958 +0000 UTC m=+1685.199546882" observedRunningTime="2025-12-10 13:24:48.502684837 +0000 UTC m=+1685.718906761" watchObservedRunningTime="2025-12-10 13:24:48.504747553 +0000 UTC m=+1685.720969497" Dec 10 13:24:51 crc kubenswrapper[4921]: I1210 13:24:51.192520 4921 scope.go:117] "RemoveContainer" containerID="0c62b92cda59f27c8c407b11b3375247aad49e7f6c08bd5714c68b8dfabe123c" Dec 10 13:24:51 crc kubenswrapper[4921]: E1210 13:24:51.193028 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-vn2n6_openshift-machine-config-operator(354355f7-6630-49a8-bdc5-5e875feecb7f)\"" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" podUID="354355f7-6630-49a8-bdc5-5e875feecb7f" Dec 10 13:24:53 crc kubenswrapper[4921]: I1210 13:24:53.530538 4921 generic.go:334] "Generic (PLEG): container finished" podID="05f95f16-6d6b-4820-bdc1-956651c51519" containerID="72975f73a01492ef5c6fea2d8f62440d1996e2ee579781949d6d28e38340bd01" exitCode=0 Dec 10 13:24:53 crc kubenswrapper[4921]: I1210 13:24:53.530638 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-whdnn" event={"ID":"05f95f16-6d6b-4820-bdc1-956651c51519","Type":"ContainerDied","Data":"72975f73a01492ef5c6fea2d8f62440d1996e2ee579781949d6d28e38340bd01"} Dec 10 13:24:54 crc kubenswrapper[4921]: I1210 13:24:54.984456 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-whdnn" Dec 10 13:24:55 crc kubenswrapper[4921]: I1210 13:24:55.120714 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/05f95f16-6d6b-4820-bdc1-956651c51519-ssh-key\") pod \"05f95f16-6d6b-4820-bdc1-956651c51519\" (UID: \"05f95f16-6d6b-4820-bdc1-956651c51519\") " Dec 10 13:24:55 crc kubenswrapper[4921]: I1210 13:24:55.120887 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/05f95f16-6d6b-4820-bdc1-956651c51519-inventory\") pod \"05f95f16-6d6b-4820-bdc1-956651c51519\" (UID: \"05f95f16-6d6b-4820-bdc1-956651c51519\") " Dec 10 13:24:55 crc kubenswrapper[4921]: I1210 13:24:55.120918 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-djst4\" (UniqueName: \"kubernetes.io/projected/05f95f16-6d6b-4820-bdc1-956651c51519-kube-api-access-djst4\") pod \"05f95f16-6d6b-4820-bdc1-956651c51519\" (UID: \"05f95f16-6d6b-4820-bdc1-956651c51519\") " Dec 10 13:24:55 crc kubenswrapper[4921]: I1210 13:24:55.128384 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/05f95f16-6d6b-4820-bdc1-956651c51519-kube-api-access-djst4" (OuterVolumeSpecName: "kube-api-access-djst4") pod "05f95f16-6d6b-4820-bdc1-956651c51519" (UID: "05f95f16-6d6b-4820-bdc1-956651c51519"). InnerVolumeSpecName "kube-api-access-djst4". 
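The pod_startup_latency_tracker entry above reports two numbers: podStartE2EDuration is the wall-clock time from pod creation to the observed running state, while podStartSLOduration excludes the image-pull window (firstStartedPulling to lastFinishedPulling). A minimal Go sketch of that arithmetic, using the monotonic-clock offsets (the m=+... values) from the validate-network pod's entry; the variable names are illustrative, not the kubelet's internals:

    package main

    import "fmt"

    func main() {
    	// Monotonic offsets (m=+...) copied from the log entry above.
    	firstStartedPulling := 1684.736585754
    	lastFinishedPulling := 1685.199546882
    	e2e := 2.504747553 // podStartE2EDuration, in seconds

    	pull := lastFinishedPulling - firstStartedPulling // image-pull window
    	slo := e2e - pull
    	fmt.Printf("pull=%.9fs slo=%.9fs\n", pull, slo)
    	// Prints pull=0.462961128s slo=2.041786425s, matching
    	// podStartSLOduration=2.041786425 reported by the tracker.
    }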
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 13:24:55 crc kubenswrapper[4921]: I1210 13:24:55.154992 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/05f95f16-6d6b-4820-bdc1-956651c51519-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "05f95f16-6d6b-4820-bdc1-956651c51519" (UID: "05f95f16-6d6b-4820-bdc1-956651c51519"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:24:55 crc kubenswrapper[4921]: I1210 13:24:55.160832 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/05f95f16-6d6b-4820-bdc1-956651c51519-inventory" (OuterVolumeSpecName: "inventory") pod "05f95f16-6d6b-4820-bdc1-956651c51519" (UID: "05f95f16-6d6b-4820-bdc1-956651c51519"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:24:55 crc kubenswrapper[4921]: I1210 13:24:55.222746 4921 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/05f95f16-6d6b-4820-bdc1-956651c51519-inventory\") on node \"crc\" DevicePath \"\"" Dec 10 13:24:55 crc kubenswrapper[4921]: I1210 13:24:55.222785 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-djst4\" (UniqueName: \"kubernetes.io/projected/05f95f16-6d6b-4820-bdc1-956651c51519-kube-api-access-djst4\") on node \"crc\" DevicePath \"\"" Dec 10 13:24:55 crc kubenswrapper[4921]: I1210 13:24:55.222799 4921 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/05f95f16-6d6b-4820-bdc1-956651c51519-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 10 13:24:55 crc kubenswrapper[4921]: I1210 13:24:55.553857 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-whdnn" event={"ID":"05f95f16-6d6b-4820-bdc1-956651c51519","Type":"ContainerDied","Data":"62521af19296674911a6a74e3835556887a17392e44acf5a40200f9fe14a5905"} Dec 10 13:24:55 crc kubenswrapper[4921]: I1210 13:24:55.553907 4921 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="62521af19296674911a6a74e3835556887a17392e44acf5a40200f9fe14a5905" Dec 10 13:24:55 crc kubenswrapper[4921]: I1210 13:24:55.553968 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-whdnn" Dec 10 13:24:55 crc kubenswrapper[4921]: I1210 13:24:55.637433 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-szqlc"] Dec 10 13:24:55 crc kubenswrapper[4921]: E1210 13:24:55.637852 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="05f95f16-6d6b-4820-bdc1-956651c51519" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Dec 10 13:24:55 crc kubenswrapper[4921]: I1210 13:24:55.637867 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="05f95f16-6d6b-4820-bdc1-956651c51519" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Dec 10 13:24:55 crc kubenswrapper[4921]: I1210 13:24:55.638029 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="05f95f16-6d6b-4820-bdc1-956651c51519" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Dec 10 13:24:55 crc kubenswrapper[4921]: I1210 13:24:55.638558 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-szqlc" Dec 10 13:24:55 crc kubenswrapper[4921]: I1210 13:24:55.641655 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 10 13:24:55 crc kubenswrapper[4921]: I1210 13:24:55.642001 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 10 13:24:55 crc kubenswrapper[4921]: I1210 13:24:55.642279 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-tgn2z" Dec 10 13:24:55 crc kubenswrapper[4921]: I1210 13:24:55.642284 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 10 13:24:55 crc kubenswrapper[4921]: I1210 13:24:55.695622 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-szqlc"] Dec 10 13:24:55 crc kubenswrapper[4921]: I1210 13:24:55.732638 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6d9b88b4-c132-455f-94e1-742726a6bdf8-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-szqlc\" (UID: \"6d9b88b4-c132-455f-94e1-742726a6bdf8\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-szqlc" Dec 10 13:24:55 crc kubenswrapper[4921]: I1210 13:24:55.732943 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6d9b88b4-c132-455f-94e1-742726a6bdf8-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-szqlc\" (UID: \"6d9b88b4-c132-455f-94e1-742726a6bdf8\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-szqlc" Dec 10 13:24:55 crc kubenswrapper[4921]: I1210 13:24:55.733247 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dfbgc\" (UniqueName: \"kubernetes.io/projected/6d9b88b4-c132-455f-94e1-742726a6bdf8-kube-api-access-dfbgc\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-szqlc\" (UID: \"6d9b88b4-c132-455f-94e1-742726a6bdf8\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-szqlc" Dec 10 13:24:55 crc kubenswrapper[4921]: I1210 13:24:55.835309 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dfbgc\" (UniqueName: \"kubernetes.io/projected/6d9b88b4-c132-455f-94e1-742726a6bdf8-kube-api-access-dfbgc\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-szqlc\" (UID: \"6d9b88b4-c132-455f-94e1-742726a6bdf8\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-szqlc" Dec 10 13:24:55 crc kubenswrapper[4921]: I1210 13:24:55.835467 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6d9b88b4-c132-455f-94e1-742726a6bdf8-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-szqlc\" (UID: \"6d9b88b4-c132-455f-94e1-742726a6bdf8\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-szqlc" Dec 10 13:24:55 crc kubenswrapper[4921]: I1210 13:24:55.835550 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6d9b88b4-c132-455f-94e1-742726a6bdf8-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-szqlc\" (UID: 
\"6d9b88b4-c132-455f-94e1-742726a6bdf8\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-szqlc" Dec 10 13:24:55 crc kubenswrapper[4921]: I1210 13:24:55.846629 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6d9b88b4-c132-455f-94e1-742726a6bdf8-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-szqlc\" (UID: \"6d9b88b4-c132-455f-94e1-742726a6bdf8\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-szqlc" Dec 10 13:24:55 crc kubenswrapper[4921]: I1210 13:24:55.848029 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6d9b88b4-c132-455f-94e1-742726a6bdf8-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-szqlc\" (UID: \"6d9b88b4-c132-455f-94e1-742726a6bdf8\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-szqlc" Dec 10 13:24:55 crc kubenswrapper[4921]: I1210 13:24:55.865133 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dfbgc\" (UniqueName: \"kubernetes.io/projected/6d9b88b4-c132-455f-94e1-742726a6bdf8-kube-api-access-dfbgc\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-szqlc\" (UID: \"6d9b88b4-c132-455f-94e1-742726a6bdf8\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-szqlc" Dec 10 13:24:55 crc kubenswrapper[4921]: I1210 13:24:55.955673 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-szqlc" Dec 10 13:24:56 crc kubenswrapper[4921]: I1210 13:24:56.272854 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-szqlc"] Dec 10 13:24:56 crc kubenswrapper[4921]: I1210 13:24:56.562428 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-szqlc" event={"ID":"6d9b88b4-c132-455f-94e1-742726a6bdf8","Type":"ContainerStarted","Data":"7826a76bbd3fc540f038fdd7e15bf96d76bed87c09dc41e2b88994f82acd964c"} Dec 10 13:24:58 crc kubenswrapper[4921]: I1210 13:24:58.584438 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-szqlc" event={"ID":"6d9b88b4-c132-455f-94e1-742726a6bdf8","Type":"ContainerStarted","Data":"14cfaf23ad27a54341e69d8e45891e877615706a6c7a5483b8e9b0eca6b28a48"} Dec 10 13:24:58 crc kubenswrapper[4921]: I1210 13:24:58.612341 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-szqlc" podStartSLOduration=2.451864224 podStartE2EDuration="3.6123209s" podCreationTimestamp="2025-12-10 13:24:55 +0000 UTC" firstStartedPulling="2025-12-10 13:24:56.291226773 +0000 UTC m=+1693.507448697" lastFinishedPulling="2025-12-10 13:24:57.451683419 +0000 UTC m=+1694.667905373" observedRunningTime="2025-12-10 13:24:58.603085912 +0000 UTC m=+1695.819307916" watchObservedRunningTime="2025-12-10 13:24:58.6123209 +0000 UTC m=+1695.828542834" Dec 10 13:25:04 crc kubenswrapper[4921]: I1210 13:25:04.192240 4921 scope.go:117] "RemoveContainer" containerID="0c62b92cda59f27c8c407b11b3375247aad49e7f6c08bd5714c68b8dfabe123c" Dec 10 13:25:04 crc kubenswrapper[4921]: E1210 13:25:04.193284 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-vn2n6_openshift-machine-config-operator(354355f7-6630-49a8-bdc5-5e875feecb7f)\"" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" podUID="354355f7-6630-49a8-bdc5-5e875feecb7f" Dec 10 13:25:17 crc kubenswrapper[4921]: I1210 13:25:17.192907 4921 scope.go:117] "RemoveContainer" containerID="0c62b92cda59f27c8c407b11b3375247aad49e7f6c08bd5714c68b8dfabe123c" Dec 10 13:25:17 crc kubenswrapper[4921]: E1210 13:25:17.193724 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-vn2n6_openshift-machine-config-operator(354355f7-6630-49a8-bdc5-5e875feecb7f)\"" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" podUID="354355f7-6630-49a8-bdc5-5e875feecb7f" Dec 10 13:25:18 crc kubenswrapper[4921]: I1210 13:25:18.087998 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-w99v8"] Dec 10 13:25:18 crc kubenswrapper[4921]: I1210 13:25:18.107058 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-fa22-account-create-update-tzvqm"] Dec 10 13:25:18 crc kubenswrapper[4921]: I1210 13:25:18.114580 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-86e3-account-create-update-lp5pz"] Dec 10 13:25:18 crc kubenswrapper[4921]: I1210 13:25:18.123529 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-pqljd"] Dec 10 13:25:18 crc kubenswrapper[4921]: I1210 13:25:18.131439 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-rv5zq"] Dec 10 13:25:18 crc kubenswrapper[4921]: I1210 13:25:18.138582 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-1e5d-account-create-update-27txp"] Dec 10 13:25:18 crc kubenswrapper[4921]: I1210 13:25:18.144650 4921 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-w99v8"] Dec 10 13:25:18 crc kubenswrapper[4921]: I1210 13:25:18.150909 4921 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-fa22-account-create-update-tzvqm"] Dec 10 13:25:18 crc kubenswrapper[4921]: I1210 13:25:18.156794 4921 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-rv5zq"] Dec 10 13:25:18 crc kubenswrapper[4921]: I1210 13:25:18.162374 4921 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-86e3-account-create-update-lp5pz"] Dec 10 13:25:18 crc kubenswrapper[4921]: I1210 13:25:18.167899 4921 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-pqljd"] Dec 10 13:25:18 crc kubenswrapper[4921]: I1210 13:25:18.173262 4921 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-1e5d-account-create-update-27txp"] Dec 10 13:25:19 crc kubenswrapper[4921]: I1210 13:25:19.203913 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0837454b-bf93-4505-b41e-6a6c7ddd5793" path="/var/lib/kubelet/pods/0837454b-bf93-4505-b41e-6a6c7ddd5793/volumes" Dec 10 13:25:19 crc kubenswrapper[4921]: I1210 13:25:19.204657 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1858d166-0eea-410c-ba8a-69f9968e698e" path="/var/lib/kubelet/pods/1858d166-0eea-410c-ba8a-69f9968e698e/volumes" Dec 10 13:25:19 crc kubenswrapper[4921]: I1210 13:25:19.205304 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod 
volumes dir" podUID="53d5a690-c61b-4dfd-be05-b5bfca7f4adf" path="/var/lib/kubelet/pods/53d5a690-c61b-4dfd-be05-b5bfca7f4adf/volumes" Dec 10 13:25:19 crc kubenswrapper[4921]: I1210 13:25:19.205980 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7a270057-fdbe-4726-9b34-6d42b47027fd" path="/var/lib/kubelet/pods/7a270057-fdbe-4726-9b34-6d42b47027fd/volumes" Dec 10 13:25:19 crc kubenswrapper[4921]: I1210 13:25:19.207217 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bca7d85c-d634-42fd-95c4-1b8955ff1ac1" path="/var/lib/kubelet/pods/bca7d85c-d634-42fd-95c4-1b8955ff1ac1/volumes" Dec 10 13:25:19 crc kubenswrapper[4921]: I1210 13:25:19.208019 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d43add4e-46f3-4485-815e-363015a4d1b0" path="/var/lib/kubelet/pods/d43add4e-46f3-4485-815e-363015a4d1b0/volumes" Dec 10 13:25:28 crc kubenswrapper[4921]: I1210 13:25:28.193720 4921 scope.go:117] "RemoveContainer" containerID="0c62b92cda59f27c8c407b11b3375247aad49e7f6c08bd5714c68b8dfabe123c" Dec 10 13:25:28 crc kubenswrapper[4921]: E1210 13:25:28.194483 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-vn2n6_openshift-machine-config-operator(354355f7-6630-49a8-bdc5-5e875feecb7f)\"" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" podUID="354355f7-6630-49a8-bdc5-5e875feecb7f" Dec 10 13:25:43 crc kubenswrapper[4921]: I1210 13:25:43.197977 4921 scope.go:117] "RemoveContainer" containerID="0c62b92cda59f27c8c407b11b3375247aad49e7f6c08bd5714c68b8dfabe123c" Dec 10 13:25:43 crc kubenswrapper[4921]: E1210 13:25:43.199021 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-vn2n6_openshift-machine-config-operator(354355f7-6630-49a8-bdc5-5e875feecb7f)\"" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" podUID="354355f7-6630-49a8-bdc5-5e875feecb7f" Dec 10 13:25:43 crc kubenswrapper[4921]: I1210 13:25:43.948825 4921 generic.go:334] "Generic (PLEG): container finished" podID="6d9b88b4-c132-455f-94e1-742726a6bdf8" containerID="14cfaf23ad27a54341e69d8e45891e877615706a6c7a5483b8e9b0eca6b28a48" exitCode=0 Dec 10 13:25:43 crc kubenswrapper[4921]: I1210 13:25:43.949081 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-szqlc" event={"ID":"6d9b88b4-c132-455f-94e1-742726a6bdf8","Type":"ContainerDied","Data":"14cfaf23ad27a54341e69d8e45891e877615706a6c7a5483b8e9b0eca6b28a48"} Dec 10 13:25:45 crc kubenswrapper[4921]: I1210 13:25:45.309567 4921 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-szqlc" Dec 10 13:25:45 crc kubenswrapper[4921]: I1210 13:25:45.487016 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6d9b88b4-c132-455f-94e1-742726a6bdf8-ssh-key\") pod \"6d9b88b4-c132-455f-94e1-742726a6bdf8\" (UID: \"6d9b88b4-c132-455f-94e1-742726a6bdf8\") " Dec 10 13:25:45 crc kubenswrapper[4921]: I1210 13:25:45.487412 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6d9b88b4-c132-455f-94e1-742726a6bdf8-inventory\") pod \"6d9b88b4-c132-455f-94e1-742726a6bdf8\" (UID: \"6d9b88b4-c132-455f-94e1-742726a6bdf8\") " Dec 10 13:25:45 crc kubenswrapper[4921]: I1210 13:25:45.487552 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dfbgc\" (UniqueName: \"kubernetes.io/projected/6d9b88b4-c132-455f-94e1-742726a6bdf8-kube-api-access-dfbgc\") pod \"6d9b88b4-c132-455f-94e1-742726a6bdf8\" (UID: \"6d9b88b4-c132-455f-94e1-742726a6bdf8\") " Dec 10 13:25:45 crc kubenswrapper[4921]: I1210 13:25:45.497046 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6d9b88b4-c132-455f-94e1-742726a6bdf8-kube-api-access-dfbgc" (OuterVolumeSpecName: "kube-api-access-dfbgc") pod "6d9b88b4-c132-455f-94e1-742726a6bdf8" (UID: "6d9b88b4-c132-455f-94e1-742726a6bdf8"). InnerVolumeSpecName "kube-api-access-dfbgc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 13:25:45 crc kubenswrapper[4921]: I1210 13:25:45.516063 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6d9b88b4-c132-455f-94e1-742726a6bdf8-inventory" (OuterVolumeSpecName: "inventory") pod "6d9b88b4-c132-455f-94e1-742726a6bdf8" (UID: "6d9b88b4-c132-455f-94e1-742726a6bdf8"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:25:45 crc kubenswrapper[4921]: I1210 13:25:45.517733 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6d9b88b4-c132-455f-94e1-742726a6bdf8-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "6d9b88b4-c132-455f-94e1-742726a6bdf8" (UID: "6d9b88b4-c132-455f-94e1-742726a6bdf8"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:25:45 crc kubenswrapper[4921]: I1210 13:25:45.589775 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dfbgc\" (UniqueName: \"kubernetes.io/projected/6d9b88b4-c132-455f-94e1-742726a6bdf8-kube-api-access-dfbgc\") on node \"crc\" DevicePath \"\"" Dec 10 13:25:45 crc kubenswrapper[4921]: I1210 13:25:45.590014 4921 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6d9b88b4-c132-455f-94e1-742726a6bdf8-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 10 13:25:45 crc kubenswrapper[4921]: I1210 13:25:45.590151 4921 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6d9b88b4-c132-455f-94e1-742726a6bdf8-inventory\") on node \"crc\" DevicePath \"\"" Dec 10 13:25:45 crc kubenswrapper[4921]: I1210 13:25:45.799437 4921 scope.go:117] "RemoveContainer" containerID="864be27166ab66d9bca75e27fe36fc341708f8cbe8db20d9b124fe94033f650a" Dec 10 13:25:45 crc kubenswrapper[4921]: I1210 13:25:45.846021 4921 scope.go:117] "RemoveContainer" containerID="202f6a3dd4ed014edd57d29142e0cf0c000a01ab8f9c0595bf692fee1a0481b3" Dec 10 13:25:45 crc kubenswrapper[4921]: I1210 13:25:45.875905 4921 scope.go:117] "RemoveContainer" containerID="4e2009f913f12810e76ace5768c5e89bfca792ea46f2f87f96c5c78fb74c7758" Dec 10 13:25:45 crc kubenswrapper[4921]: I1210 13:25:45.901409 4921 scope.go:117] "RemoveContainer" containerID="c5dba9f00fd76539a9ff655700814bb6c564608cdef33cdf30895858ea4debb5" Dec 10 13:25:45 crc kubenswrapper[4921]: I1210 13:25:45.921245 4921 scope.go:117] "RemoveContainer" containerID="d0e826f63415809cb628deae248d6993ddd3ad7d6d5323b966b965139a1136af" Dec 10 13:25:45 crc kubenswrapper[4921]: I1210 13:25:45.941716 4921 scope.go:117] "RemoveContainer" containerID="42b53efb98d353e6ce6f226b70dfec2b5db6da89a424777eaec95aaa10a80ed4" Dec 10 13:25:45 crc kubenswrapper[4921]: I1210 13:25:45.964495 4921 scope.go:117] "RemoveContainer" containerID="2fca88647a2c15e9928330cc72846624e630fe22cad55c0a5e782e7586e241b8" Dec 10 13:25:45 crc kubenswrapper[4921]: I1210 13:25:45.977867 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-szqlc" event={"ID":"6d9b88b4-c132-455f-94e1-742726a6bdf8","Type":"ContainerDied","Data":"7826a76bbd3fc540f038fdd7e15bf96d76bed87c09dc41e2b88994f82acd964c"} Dec 10 13:25:45 crc kubenswrapper[4921]: I1210 13:25:45.977988 4921 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7826a76bbd3fc540f038fdd7e15bf96d76bed87c09dc41e2b88994f82acd964c" Dec 10 13:25:45 crc kubenswrapper[4921]: I1210 13:25:45.978061 4921 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-szqlc" Dec 10 13:25:46 crc kubenswrapper[4921]: I1210 13:25:46.060408 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-rl5q9"] Dec 10 13:25:46 crc kubenswrapper[4921]: E1210 13:25:46.060812 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6d9b88b4-c132-455f-94e1-742726a6bdf8" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Dec 10 13:25:46 crc kubenswrapper[4921]: I1210 13:25:46.060830 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="6d9b88b4-c132-455f-94e1-742726a6bdf8" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Dec 10 13:25:46 crc kubenswrapper[4921]: I1210 13:25:46.061008 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="6d9b88b4-c132-455f-94e1-742726a6bdf8" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Dec 10 13:25:46 crc kubenswrapper[4921]: I1210 13:25:46.061567 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-rl5q9" Dec 10 13:25:46 crc kubenswrapper[4921]: I1210 13:25:46.065179 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 10 13:25:46 crc kubenswrapper[4921]: I1210 13:25:46.065211 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-tgn2z" Dec 10 13:25:46 crc kubenswrapper[4921]: I1210 13:25:46.066957 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 10 13:25:46 crc kubenswrapper[4921]: I1210 13:25:46.067611 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 10 13:25:46 crc kubenswrapper[4921]: I1210 13:25:46.071203 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-rl5q9"] Dec 10 13:25:46 crc kubenswrapper[4921]: I1210 13:25:46.097073 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mqnhd\" (UniqueName: \"kubernetes.io/projected/1b920f02-bacb-47ab-b4d3-8650f287bd0a-kube-api-access-mqnhd\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-rl5q9\" (UID: \"1b920f02-bacb-47ab-b4d3-8650f287bd0a\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-rl5q9" Dec 10 13:25:46 crc kubenswrapper[4921]: I1210 13:25:46.097160 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1b920f02-bacb-47ab-b4d3-8650f287bd0a-inventory\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-rl5q9\" (UID: \"1b920f02-bacb-47ab-b4d3-8650f287bd0a\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-rl5q9" Dec 10 13:25:46 crc kubenswrapper[4921]: I1210 13:25:46.097257 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1b920f02-bacb-47ab-b4d3-8650f287bd0a-ssh-key\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-rl5q9\" (UID: \"1b920f02-bacb-47ab-b4d3-8650f287bd0a\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-rl5q9" Dec 10 13:25:46 crc kubenswrapper[4921]: I1210 13:25:46.199209 4921 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-mqnhd\" (UniqueName: \"kubernetes.io/projected/1b920f02-bacb-47ab-b4d3-8650f287bd0a-kube-api-access-mqnhd\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-rl5q9\" (UID: \"1b920f02-bacb-47ab-b4d3-8650f287bd0a\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-rl5q9" Dec 10 13:25:46 crc kubenswrapper[4921]: I1210 13:25:46.199272 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1b920f02-bacb-47ab-b4d3-8650f287bd0a-inventory\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-rl5q9\" (UID: \"1b920f02-bacb-47ab-b4d3-8650f287bd0a\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-rl5q9" Dec 10 13:25:46 crc kubenswrapper[4921]: I1210 13:25:46.199339 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1b920f02-bacb-47ab-b4d3-8650f287bd0a-ssh-key\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-rl5q9\" (UID: \"1b920f02-bacb-47ab-b4d3-8650f287bd0a\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-rl5q9" Dec 10 13:25:46 crc kubenswrapper[4921]: I1210 13:25:46.204797 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1b920f02-bacb-47ab-b4d3-8650f287bd0a-inventory\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-rl5q9\" (UID: \"1b920f02-bacb-47ab-b4d3-8650f287bd0a\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-rl5q9" Dec 10 13:25:46 crc kubenswrapper[4921]: I1210 13:25:46.205235 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1b920f02-bacb-47ab-b4d3-8650f287bd0a-ssh-key\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-rl5q9\" (UID: \"1b920f02-bacb-47ab-b4d3-8650f287bd0a\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-rl5q9" Dec 10 13:25:46 crc kubenswrapper[4921]: I1210 13:25:46.219597 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mqnhd\" (UniqueName: \"kubernetes.io/projected/1b920f02-bacb-47ab-b4d3-8650f287bd0a-kube-api-access-mqnhd\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-rl5q9\" (UID: \"1b920f02-bacb-47ab-b4d3-8650f287bd0a\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-rl5q9" Dec 10 13:25:46 crc kubenswrapper[4921]: I1210 13:25:46.393859 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-rl5q9" Dec 10 13:25:46 crc kubenswrapper[4921]: I1210 13:25:46.907506 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-rl5q9"] Dec 10 13:25:46 crc kubenswrapper[4921]: I1210 13:25:46.994019 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-rl5q9" event={"ID":"1b920f02-bacb-47ab-b4d3-8650f287bd0a","Type":"ContainerStarted","Data":"2c37332575f9c89a323b5e0ea12d1d0b3bf0649b19ed10a7f8e1ecb9e9f4c383"} Dec 10 13:25:47 crc kubenswrapper[4921]: I1210 13:25:47.037687 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-ftfd5"] Dec 10 13:25:47 crc kubenswrapper[4921]: I1210 13:25:47.044773 4921 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-ftfd5"] Dec 10 13:25:47 crc kubenswrapper[4921]: I1210 13:25:47.202484 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="54815de8-8713-4fac-b91a-3ee4ecb8a068" path="/var/lib/kubelet/pods/54815de8-8713-4fac-b91a-3ee4ecb8a068/volumes" Dec 10 13:25:48 crc kubenswrapper[4921]: I1210 13:25:48.005883 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-rl5q9" event={"ID":"1b920f02-bacb-47ab-b4d3-8650f287bd0a","Type":"ContainerStarted","Data":"9106e5ee8addd7b30e5e21568a88ab702276c55d0e5c795b4f9908b734d97290"} Dec 10 13:25:52 crc kubenswrapper[4921]: I1210 13:25:52.049308 4921 generic.go:334] "Generic (PLEG): container finished" podID="1b920f02-bacb-47ab-b4d3-8650f287bd0a" containerID="9106e5ee8addd7b30e5e21568a88ab702276c55d0e5c795b4f9908b734d97290" exitCode=0 Dec 10 13:25:52 crc kubenswrapper[4921]: I1210 13:25:52.049533 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-rl5q9" event={"ID":"1b920f02-bacb-47ab-b4d3-8650f287bd0a","Type":"ContainerDied","Data":"9106e5ee8addd7b30e5e21568a88ab702276c55d0e5c795b4f9908b734d97290"} Dec 10 13:25:53 crc kubenswrapper[4921]: I1210 13:25:53.642161 4921 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-rl5q9" Dec 10 13:25:53 crc kubenswrapper[4921]: I1210 13:25:53.834186 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mqnhd\" (UniqueName: \"kubernetes.io/projected/1b920f02-bacb-47ab-b4d3-8650f287bd0a-kube-api-access-mqnhd\") pod \"1b920f02-bacb-47ab-b4d3-8650f287bd0a\" (UID: \"1b920f02-bacb-47ab-b4d3-8650f287bd0a\") " Dec 10 13:25:53 crc kubenswrapper[4921]: I1210 13:25:53.834289 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1b920f02-bacb-47ab-b4d3-8650f287bd0a-ssh-key\") pod \"1b920f02-bacb-47ab-b4d3-8650f287bd0a\" (UID: \"1b920f02-bacb-47ab-b4d3-8650f287bd0a\") " Dec 10 13:25:53 crc kubenswrapper[4921]: I1210 13:25:53.834587 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1b920f02-bacb-47ab-b4d3-8650f287bd0a-inventory\") pod \"1b920f02-bacb-47ab-b4d3-8650f287bd0a\" (UID: \"1b920f02-bacb-47ab-b4d3-8650f287bd0a\") " Dec 10 13:25:53 crc kubenswrapper[4921]: I1210 13:25:53.844000 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1b920f02-bacb-47ab-b4d3-8650f287bd0a-kube-api-access-mqnhd" (OuterVolumeSpecName: "kube-api-access-mqnhd") pod "1b920f02-bacb-47ab-b4d3-8650f287bd0a" (UID: "1b920f02-bacb-47ab-b4d3-8650f287bd0a"). InnerVolumeSpecName "kube-api-access-mqnhd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 13:25:53 crc kubenswrapper[4921]: I1210 13:25:53.857861 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1b920f02-bacb-47ab-b4d3-8650f287bd0a-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "1b920f02-bacb-47ab-b4d3-8650f287bd0a" (UID: "1b920f02-bacb-47ab-b4d3-8650f287bd0a"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:25:53 crc kubenswrapper[4921]: I1210 13:25:53.866297 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1b920f02-bacb-47ab-b4d3-8650f287bd0a-inventory" (OuterVolumeSpecName: "inventory") pod "1b920f02-bacb-47ab-b4d3-8650f287bd0a" (UID: "1b920f02-bacb-47ab-b4d3-8650f287bd0a"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:25:53 crc kubenswrapper[4921]: I1210 13:25:53.936210 4921 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1b920f02-bacb-47ab-b4d3-8650f287bd0a-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 10 13:25:53 crc kubenswrapper[4921]: I1210 13:25:53.936247 4921 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1b920f02-bacb-47ab-b4d3-8650f287bd0a-inventory\") on node \"crc\" DevicePath \"\"" Dec 10 13:25:53 crc kubenswrapper[4921]: I1210 13:25:53.936259 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mqnhd\" (UniqueName: \"kubernetes.io/projected/1b920f02-bacb-47ab-b4d3-8650f287bd0a-kube-api-access-mqnhd\") on node \"crc\" DevicePath \"\"" Dec 10 13:25:54 crc kubenswrapper[4921]: I1210 13:25:54.070693 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-rl5q9" event={"ID":"1b920f02-bacb-47ab-b4d3-8650f287bd0a","Type":"ContainerDied","Data":"2c37332575f9c89a323b5e0ea12d1d0b3bf0649b19ed10a7f8e1ecb9e9f4c383"} Dec 10 13:25:54 crc kubenswrapper[4921]: I1210 13:25:54.070975 4921 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2c37332575f9c89a323b5e0ea12d1d0b3bf0649b19ed10a7f8e1ecb9e9f4c383" Dec 10 13:25:54 crc kubenswrapper[4921]: I1210 13:25:54.071024 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-rl5q9" Dec 10 13:25:54 crc kubenswrapper[4921]: I1210 13:25:54.151675 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-sw4fz"] Dec 10 13:25:54 crc kubenswrapper[4921]: E1210 13:25:54.152016 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1b920f02-bacb-47ab-b4d3-8650f287bd0a" containerName="ceph-hci-pre-edpm-deployment-openstack-edpm-ipam" Dec 10 13:25:54 crc kubenswrapper[4921]: I1210 13:25:54.152030 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="1b920f02-bacb-47ab-b4d3-8650f287bd0a" containerName="ceph-hci-pre-edpm-deployment-openstack-edpm-ipam" Dec 10 13:25:54 crc kubenswrapper[4921]: I1210 13:25:54.152249 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="1b920f02-bacb-47ab-b4d3-8650f287bd0a" containerName="ceph-hci-pre-edpm-deployment-openstack-edpm-ipam" Dec 10 13:25:54 crc kubenswrapper[4921]: I1210 13:25:54.152859 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-sw4fz" Dec 10 13:25:54 crc kubenswrapper[4921]: I1210 13:25:54.155620 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 10 13:25:54 crc kubenswrapper[4921]: I1210 13:25:54.155645 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-tgn2z" Dec 10 13:25:54 crc kubenswrapper[4921]: I1210 13:25:54.158470 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-sw4fz"] Dec 10 13:25:54 crc kubenswrapper[4921]: I1210 13:25:54.159745 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 10 13:25:54 crc kubenswrapper[4921]: I1210 13:25:54.161339 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 10 13:25:54 crc kubenswrapper[4921]: I1210 13:25:54.354618 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3f964510-c89d-47da-ae1f-0f29cbde809f-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-sw4fz\" (UID: \"3f964510-c89d-47da-ae1f-0f29cbde809f\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-sw4fz" Dec 10 13:25:54 crc kubenswrapper[4921]: I1210 13:25:54.354675 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lqxg7\" (UniqueName: \"kubernetes.io/projected/3f964510-c89d-47da-ae1f-0f29cbde809f-kube-api-access-lqxg7\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-sw4fz\" (UID: \"3f964510-c89d-47da-ae1f-0f29cbde809f\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-sw4fz" Dec 10 13:25:54 crc kubenswrapper[4921]: I1210 13:25:54.354705 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3f964510-c89d-47da-ae1f-0f29cbde809f-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-sw4fz\" (UID: \"3f964510-c89d-47da-ae1f-0f29cbde809f\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-sw4fz" Dec 10 13:25:54 crc kubenswrapper[4921]: I1210 13:25:54.456208 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lqxg7\" (UniqueName: \"kubernetes.io/projected/3f964510-c89d-47da-ae1f-0f29cbde809f-kube-api-access-lqxg7\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-sw4fz\" (UID: \"3f964510-c89d-47da-ae1f-0f29cbde809f\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-sw4fz" Dec 10 13:25:54 crc kubenswrapper[4921]: I1210 13:25:54.456280 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3f964510-c89d-47da-ae1f-0f29cbde809f-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-sw4fz\" (UID: \"3f964510-c89d-47da-ae1f-0f29cbde809f\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-sw4fz" Dec 10 13:25:54 crc kubenswrapper[4921]: I1210 13:25:54.456484 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3f964510-c89d-47da-ae1f-0f29cbde809f-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-sw4fz\" 
(UID: \"3f964510-c89d-47da-ae1f-0f29cbde809f\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-sw4fz" Dec 10 13:25:54 crc kubenswrapper[4921]: I1210 13:25:54.461086 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3f964510-c89d-47da-ae1f-0f29cbde809f-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-sw4fz\" (UID: \"3f964510-c89d-47da-ae1f-0f29cbde809f\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-sw4fz" Dec 10 13:25:54 crc kubenswrapper[4921]: I1210 13:25:54.461441 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3f964510-c89d-47da-ae1f-0f29cbde809f-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-sw4fz\" (UID: \"3f964510-c89d-47da-ae1f-0f29cbde809f\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-sw4fz" Dec 10 13:25:54 crc kubenswrapper[4921]: I1210 13:25:54.480805 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lqxg7\" (UniqueName: \"kubernetes.io/projected/3f964510-c89d-47da-ae1f-0f29cbde809f-kube-api-access-lqxg7\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-sw4fz\" (UID: \"3f964510-c89d-47da-ae1f-0f29cbde809f\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-sw4fz" Dec 10 13:25:54 crc kubenswrapper[4921]: I1210 13:25:54.778039 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-sw4fz" Dec 10 13:25:55 crc kubenswrapper[4921]: I1210 13:25:55.295060 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-sw4fz"] Dec 10 13:25:56 crc kubenswrapper[4921]: I1210 13:25:56.087285 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-sw4fz" event={"ID":"3f964510-c89d-47da-ae1f-0f29cbde809f","Type":"ContainerStarted","Data":"411e0628b1df3bd27555b22672a2ef319b3f2639ff7eeedcd96b4fa015a38692"} Dec 10 13:25:57 crc kubenswrapper[4921]: I1210 13:25:57.096162 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-sw4fz" event={"ID":"3f964510-c89d-47da-ae1f-0f29cbde809f","Type":"ContainerStarted","Data":"6e0c6a88184b1a2babba91f80ea2ef9907ebfc4607ee58e4fd4ee8fe724b7afe"} Dec 10 13:25:57 crc kubenswrapper[4921]: I1210 13:25:57.193100 4921 scope.go:117] "RemoveContainer" containerID="0c62b92cda59f27c8c407b11b3375247aad49e7f6c08bd5714c68b8dfabe123c" Dec 10 13:25:57 crc kubenswrapper[4921]: E1210 13:25:57.193301 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-vn2n6_openshift-machine-config-operator(354355f7-6630-49a8-bdc5-5e875feecb7f)\"" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" podUID="354355f7-6630-49a8-bdc5-5e875feecb7f" Dec 10 13:26:10 crc kubenswrapper[4921]: I1210 13:26:10.030197 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-sw4fz" podStartSLOduration=14.674933638 podStartE2EDuration="16.030177498s" podCreationTimestamp="2025-12-10 13:25:54 +0000 UTC" firstStartedPulling="2025-12-10 13:25:55.282920666 +0000 UTC 
m=+1752.499142580" lastFinishedPulling="2025-12-10 13:25:56.638164506 +0000 UTC m=+1753.854386440" observedRunningTime="2025-12-10 13:25:57.118631113 +0000 UTC m=+1754.334853047" watchObservedRunningTime="2025-12-10 13:26:10.030177498 +0000 UTC m=+1767.246399422" Dec 10 13:26:10 crc kubenswrapper[4921]: I1210 13:26:10.037264 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-69vnb"] Dec 10 13:26:10 crc kubenswrapper[4921]: I1210 13:26:10.044962 4921 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-69vnb"] Dec 10 13:26:11 crc kubenswrapper[4921]: I1210 13:26:11.045028 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-svrtf"] Dec 10 13:26:11 crc kubenswrapper[4921]: I1210 13:26:11.052712 4921 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-svrtf"] Dec 10 13:26:11 crc kubenswrapper[4921]: I1210 13:26:11.193234 4921 scope.go:117] "RemoveContainer" containerID="0c62b92cda59f27c8c407b11b3375247aad49e7f6c08bd5714c68b8dfabe123c" Dec 10 13:26:11 crc kubenswrapper[4921]: E1210 13:26:11.193638 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-vn2n6_openshift-machine-config-operator(354355f7-6630-49a8-bdc5-5e875feecb7f)\"" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" podUID="354355f7-6630-49a8-bdc5-5e875feecb7f" Dec 10 13:26:11 crc kubenswrapper[4921]: I1210 13:26:11.220531 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="38908ed6-c218-40db-8cec-eed17dbde6e4" path="/var/lib/kubelet/pods/38908ed6-c218-40db-8cec-eed17dbde6e4/volumes" Dec 10 13:26:11 crc kubenswrapper[4921]: I1210 13:26:11.221898 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f5951c8-dfbf-4a11-aeb8-f95531cafe8c" path="/var/lib/kubelet/pods/8f5951c8-dfbf-4a11-aeb8-f95531cafe8c/volumes" Dec 10 13:26:24 crc kubenswrapper[4921]: I1210 13:26:24.192765 4921 scope.go:117] "RemoveContainer" containerID="0c62b92cda59f27c8c407b11b3375247aad49e7f6c08bd5714c68b8dfabe123c" Dec 10 13:26:24 crc kubenswrapper[4921]: E1210 13:26:24.193864 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-vn2n6_openshift-machine-config-operator(354355f7-6630-49a8-bdc5-5e875feecb7f)\"" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" podUID="354355f7-6630-49a8-bdc5-5e875feecb7f" Dec 10 13:26:37 crc kubenswrapper[4921]: I1210 13:26:37.193658 4921 scope.go:117] "RemoveContainer" containerID="0c62b92cda59f27c8c407b11b3375247aad49e7f6c08bd5714c68b8dfabe123c" Dec 10 13:26:37 crc kubenswrapper[4921]: E1210 13:26:37.194483 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-vn2n6_openshift-machine-config-operator(354355f7-6630-49a8-bdc5-5e875feecb7f)\"" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" podUID="354355f7-6630-49a8-bdc5-5e875feecb7f" Dec 10 13:26:46 crc kubenswrapper[4921]: I1210 13:26:46.168586 4921 scope.go:117] "RemoveContainer" 
containerID="8add9dd574ba305d2fde1ac418eef025d995828e1602740c6c907468b38b3774" Dec 10 13:26:46 crc kubenswrapper[4921]: I1210 13:26:46.220214 4921 scope.go:117] "RemoveContainer" containerID="5f3a96948da2e8010c78c048ec4bbc5f1566e6effc5adcd18ccbd75b69386c1f" Dec 10 13:26:46 crc kubenswrapper[4921]: I1210 13:26:46.317254 4921 scope.go:117] "RemoveContainer" containerID="f8d9026a39c7ffd4d66826e57d9fc56bfcd85fc8e815a1ac41ed39bbbac753ac" Dec 10 13:26:49 crc kubenswrapper[4921]: I1210 13:26:49.192652 4921 scope.go:117] "RemoveContainer" containerID="0c62b92cda59f27c8c407b11b3375247aad49e7f6c08bd5714c68b8dfabe123c" Dec 10 13:26:49 crc kubenswrapper[4921]: E1210 13:26:49.193346 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-vn2n6_openshift-machine-config-operator(354355f7-6630-49a8-bdc5-5e875feecb7f)\"" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" podUID="354355f7-6630-49a8-bdc5-5e875feecb7f" Dec 10 13:26:55 crc kubenswrapper[4921]: I1210 13:26:55.050180 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-dx9x6"] Dec 10 13:26:55 crc kubenswrapper[4921]: I1210 13:26:55.060924 4921 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-dx9x6"] Dec 10 13:26:55 crc kubenswrapper[4921]: I1210 13:26:55.208935 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b4fa87ce-fd97-46de-b9d2-3f1f544e6ebd" path="/var/lib/kubelet/pods/b4fa87ce-fd97-46de-b9d2-3f1f544e6ebd/volumes" Dec 10 13:26:55 crc kubenswrapper[4921]: I1210 13:26:55.579537 4921 generic.go:334] "Generic (PLEG): container finished" podID="3f964510-c89d-47da-ae1f-0f29cbde809f" containerID="6e0c6a88184b1a2babba91f80ea2ef9907ebfc4607ee58e4fd4ee8fe724b7afe" exitCode=0 Dec 10 13:26:55 crc kubenswrapper[4921]: I1210 13:26:55.579580 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-sw4fz" event={"ID":"3f964510-c89d-47da-ae1f-0f29cbde809f","Type":"ContainerDied","Data":"6e0c6a88184b1a2babba91f80ea2ef9907ebfc4607ee58e4fd4ee8fe724b7afe"} Dec 10 13:26:57 crc kubenswrapper[4921]: I1210 13:26:57.025751 4921 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-sw4fz" Dec 10 13:26:57 crc kubenswrapper[4921]: I1210 13:26:57.070081 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lqxg7\" (UniqueName: \"kubernetes.io/projected/3f964510-c89d-47da-ae1f-0f29cbde809f-kube-api-access-lqxg7\") pod \"3f964510-c89d-47da-ae1f-0f29cbde809f\" (UID: \"3f964510-c89d-47da-ae1f-0f29cbde809f\") " Dec 10 13:26:57 crc kubenswrapper[4921]: I1210 13:26:57.070152 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3f964510-c89d-47da-ae1f-0f29cbde809f-ssh-key\") pod \"3f964510-c89d-47da-ae1f-0f29cbde809f\" (UID: \"3f964510-c89d-47da-ae1f-0f29cbde809f\") " Dec 10 13:26:57 crc kubenswrapper[4921]: I1210 13:26:57.070407 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3f964510-c89d-47da-ae1f-0f29cbde809f-inventory\") pod \"3f964510-c89d-47da-ae1f-0f29cbde809f\" (UID: \"3f964510-c89d-47da-ae1f-0f29cbde809f\") " Dec 10 13:26:57 crc kubenswrapper[4921]: I1210 13:26:57.077531 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3f964510-c89d-47da-ae1f-0f29cbde809f-kube-api-access-lqxg7" (OuterVolumeSpecName: "kube-api-access-lqxg7") pod "3f964510-c89d-47da-ae1f-0f29cbde809f" (UID: "3f964510-c89d-47da-ae1f-0f29cbde809f"). InnerVolumeSpecName "kube-api-access-lqxg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 13:26:57 crc kubenswrapper[4921]: I1210 13:26:57.102886 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3f964510-c89d-47da-ae1f-0f29cbde809f-inventory" (OuterVolumeSpecName: "inventory") pod "3f964510-c89d-47da-ae1f-0f29cbde809f" (UID: "3f964510-c89d-47da-ae1f-0f29cbde809f"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:26:57 crc kubenswrapper[4921]: I1210 13:26:57.103220 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3f964510-c89d-47da-ae1f-0f29cbde809f-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "3f964510-c89d-47da-ae1f-0f29cbde809f" (UID: "3f964510-c89d-47da-ae1f-0f29cbde809f"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:26:57 crc kubenswrapper[4921]: I1210 13:26:57.172834 4921 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3f964510-c89d-47da-ae1f-0f29cbde809f-inventory\") on node \"crc\" DevicePath \"\"" Dec 10 13:26:57 crc kubenswrapper[4921]: I1210 13:26:57.172891 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lqxg7\" (UniqueName: \"kubernetes.io/projected/3f964510-c89d-47da-ae1f-0f29cbde809f-kube-api-access-lqxg7\") on node \"crc\" DevicePath \"\"" Dec 10 13:26:57 crc kubenswrapper[4921]: I1210 13:26:57.172903 4921 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3f964510-c89d-47da-ae1f-0f29cbde809f-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 10 13:26:57 crc kubenswrapper[4921]: I1210 13:26:57.601610 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-sw4fz" event={"ID":"3f964510-c89d-47da-ae1f-0f29cbde809f","Type":"ContainerDied","Data":"411e0628b1df3bd27555b22672a2ef319b3f2639ff7eeedcd96b4fa015a38692"} Dec 10 13:26:57 crc kubenswrapper[4921]: I1210 13:26:57.601660 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-sw4fz" Dec 10 13:26:57 crc kubenswrapper[4921]: I1210 13:26:57.601667 4921 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="411e0628b1df3bd27555b22672a2ef319b3f2639ff7eeedcd96b4fa015a38692" Dec 10 13:26:57 crc kubenswrapper[4921]: I1210 13:26:57.689963 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-hcvmh"] Dec 10 13:26:57 crc kubenswrapper[4921]: E1210 13:26:57.690287 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3f964510-c89d-47da-ae1f-0f29cbde809f" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Dec 10 13:26:57 crc kubenswrapper[4921]: I1210 13:26:57.690305 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="3f964510-c89d-47da-ae1f-0f29cbde809f" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Dec 10 13:26:57 crc kubenswrapper[4921]: I1210 13:26:57.690496 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="3f964510-c89d-47da-ae1f-0f29cbde809f" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Dec 10 13:26:57 crc kubenswrapper[4921]: I1210 13:26:57.691033 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-hcvmh" Dec 10 13:26:57 crc kubenswrapper[4921]: I1210 13:26:57.693827 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 10 13:26:57 crc kubenswrapper[4921]: I1210 13:26:57.695501 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 10 13:26:57 crc kubenswrapper[4921]: I1210 13:26:57.695988 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-tgn2z" Dec 10 13:26:57 crc kubenswrapper[4921]: I1210 13:26:57.696147 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 10 13:26:57 crc kubenswrapper[4921]: I1210 13:26:57.708240 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-hcvmh"] Dec 10 13:26:57 crc kubenswrapper[4921]: I1210 13:26:57.781856 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/0cbeca26-1132-4f10-b871-6c520af4ad1b-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-hcvmh\" (UID: \"0cbeca26-1132-4f10-b871-6c520af4ad1b\") " pod="openstack/ssh-known-hosts-edpm-deployment-hcvmh" Dec 10 13:26:57 crc kubenswrapper[4921]: I1210 13:26:57.781948 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lt565\" (UniqueName: \"kubernetes.io/projected/0cbeca26-1132-4f10-b871-6c520af4ad1b-kube-api-access-lt565\") pod \"ssh-known-hosts-edpm-deployment-hcvmh\" (UID: \"0cbeca26-1132-4f10-b871-6c520af4ad1b\") " pod="openstack/ssh-known-hosts-edpm-deployment-hcvmh" Dec 10 13:26:57 crc kubenswrapper[4921]: I1210 13:26:57.781979 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/0cbeca26-1132-4f10-b871-6c520af4ad1b-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-hcvmh\" (UID: \"0cbeca26-1132-4f10-b871-6c520af4ad1b\") " pod="openstack/ssh-known-hosts-edpm-deployment-hcvmh" Dec 10 13:26:57 crc kubenswrapper[4921]: I1210 13:26:57.883331 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/0cbeca26-1132-4f10-b871-6c520af4ad1b-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-hcvmh\" (UID: \"0cbeca26-1132-4f10-b871-6c520af4ad1b\") " pod="openstack/ssh-known-hosts-edpm-deployment-hcvmh" Dec 10 13:26:57 crc kubenswrapper[4921]: I1210 13:26:57.883961 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lt565\" (UniqueName: \"kubernetes.io/projected/0cbeca26-1132-4f10-b871-6c520af4ad1b-kube-api-access-lt565\") pod \"ssh-known-hosts-edpm-deployment-hcvmh\" (UID: \"0cbeca26-1132-4f10-b871-6c520af4ad1b\") " pod="openstack/ssh-known-hosts-edpm-deployment-hcvmh" Dec 10 13:26:57 crc kubenswrapper[4921]: I1210 13:26:57.884113 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/0cbeca26-1132-4f10-b871-6c520af4ad1b-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-hcvmh\" (UID: \"0cbeca26-1132-4f10-b871-6c520af4ad1b\") " pod="openstack/ssh-known-hosts-edpm-deployment-hcvmh" Dec 10 13:26:57 crc 
kubenswrapper[4921]: I1210 13:26:57.889066 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/0cbeca26-1132-4f10-b871-6c520af4ad1b-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-hcvmh\" (UID: \"0cbeca26-1132-4f10-b871-6c520af4ad1b\") " pod="openstack/ssh-known-hosts-edpm-deployment-hcvmh" Dec 10 13:26:57 crc kubenswrapper[4921]: I1210 13:26:57.891882 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/0cbeca26-1132-4f10-b871-6c520af4ad1b-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-hcvmh\" (UID: \"0cbeca26-1132-4f10-b871-6c520af4ad1b\") " pod="openstack/ssh-known-hosts-edpm-deployment-hcvmh" Dec 10 13:26:57 crc kubenswrapper[4921]: I1210 13:26:57.904587 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lt565\" (UniqueName: \"kubernetes.io/projected/0cbeca26-1132-4f10-b871-6c520af4ad1b-kube-api-access-lt565\") pod \"ssh-known-hosts-edpm-deployment-hcvmh\" (UID: \"0cbeca26-1132-4f10-b871-6c520af4ad1b\") " pod="openstack/ssh-known-hosts-edpm-deployment-hcvmh" Dec 10 13:26:58 crc kubenswrapper[4921]: I1210 13:26:58.013692 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-hcvmh" Dec 10 13:26:58 crc kubenswrapper[4921]: I1210 13:26:58.538534 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-hcvmh"] Dec 10 13:26:58 crc kubenswrapper[4921]: I1210 13:26:58.548427 4921 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 10 13:26:58 crc kubenswrapper[4921]: I1210 13:26:58.608144 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-hcvmh" event={"ID":"0cbeca26-1132-4f10-b871-6c520af4ad1b","Type":"ContainerStarted","Data":"18cd0b5056f7ca7ebe891fb43ed4de3949f7412e83a64054d7394aaa1d12b979"} Dec 10 13:26:59 crc kubenswrapper[4921]: I1210 13:26:59.620320 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-hcvmh" event={"ID":"0cbeca26-1132-4f10-b871-6c520af4ad1b","Type":"ContainerStarted","Data":"443fac99d9d2069a0c720a02bf5362fd548bf99b78646c36e57cd8184b4fe878"} Dec 10 13:26:59 crc kubenswrapper[4921]: I1210 13:26:59.641410 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ssh-known-hosts-edpm-deployment-hcvmh" podStartSLOduration=2.21872778 podStartE2EDuration="2.641377626s" podCreationTimestamp="2025-12-10 13:26:57 +0000 UTC" firstStartedPulling="2025-12-10 13:26:58.547844625 +0000 UTC m=+1815.764066549" lastFinishedPulling="2025-12-10 13:26:58.970494451 +0000 UTC m=+1816.186716395" observedRunningTime="2025-12-10 13:26:59.636096914 +0000 UTC m=+1816.852318838" watchObservedRunningTime="2025-12-10 13:26:59.641377626 +0000 UTC m=+1816.857599550" Dec 10 13:27:00 crc kubenswrapper[4921]: I1210 13:27:00.193832 4921 scope.go:117] "RemoveContainer" containerID="0c62b92cda59f27c8c407b11b3375247aad49e7f6c08bd5714c68b8dfabe123c" Dec 10 13:27:00 crc kubenswrapper[4921]: E1210 13:27:00.194089 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-vn2n6_openshift-machine-config-operator(354355f7-6630-49a8-bdc5-5e875feecb7f)\"" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" podUID="354355f7-6630-49a8-bdc5-5e875feecb7f" Dec 10 13:27:07 crc kubenswrapper[4921]: I1210 13:27:07.684715 4921 generic.go:334] "Generic (PLEG): container finished" podID="0cbeca26-1132-4f10-b871-6c520af4ad1b" containerID="443fac99d9d2069a0c720a02bf5362fd548bf99b78646c36e57cd8184b4fe878" exitCode=0 Dec 10 13:27:07 crc kubenswrapper[4921]: I1210 13:27:07.684777 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-hcvmh" event={"ID":"0cbeca26-1132-4f10-b871-6c520af4ad1b","Type":"ContainerDied","Data":"443fac99d9d2069a0c720a02bf5362fd548bf99b78646c36e57cd8184b4fe878"} Dec 10 13:27:09 crc kubenswrapper[4921]: I1210 13:27:09.097892 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-hcvmh" Dec 10 13:27:09 crc kubenswrapper[4921]: I1210 13:27:09.186912 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/0cbeca26-1132-4f10-b871-6c520af4ad1b-inventory-0\") pod \"0cbeca26-1132-4f10-b871-6c520af4ad1b\" (UID: \"0cbeca26-1132-4f10-b871-6c520af4ad1b\") " Dec 10 13:27:09 crc kubenswrapper[4921]: I1210 13:27:09.186991 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lt565\" (UniqueName: \"kubernetes.io/projected/0cbeca26-1132-4f10-b871-6c520af4ad1b-kube-api-access-lt565\") pod \"0cbeca26-1132-4f10-b871-6c520af4ad1b\" (UID: \"0cbeca26-1132-4f10-b871-6c520af4ad1b\") " Dec 10 13:27:09 crc kubenswrapper[4921]: I1210 13:27:09.187037 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/0cbeca26-1132-4f10-b871-6c520af4ad1b-ssh-key-openstack-edpm-ipam\") pod \"0cbeca26-1132-4f10-b871-6c520af4ad1b\" (UID: \"0cbeca26-1132-4f10-b871-6c520af4ad1b\") " Dec 10 13:27:09 crc kubenswrapper[4921]: I1210 13:27:09.215861 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0cbeca26-1132-4f10-b871-6c520af4ad1b-kube-api-access-lt565" (OuterVolumeSpecName: "kube-api-access-lt565") pod "0cbeca26-1132-4f10-b871-6c520af4ad1b" (UID: "0cbeca26-1132-4f10-b871-6c520af4ad1b"). InnerVolumeSpecName "kube-api-access-lt565". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 13:27:09 crc kubenswrapper[4921]: I1210 13:27:09.221510 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0cbeca26-1132-4f10-b871-6c520af4ad1b-inventory-0" (OuterVolumeSpecName: "inventory-0") pod "0cbeca26-1132-4f10-b871-6c520af4ad1b" (UID: "0cbeca26-1132-4f10-b871-6c520af4ad1b"). InnerVolumeSpecName "inventory-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:27:09 crc kubenswrapper[4921]: I1210 13:27:09.230642 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0cbeca26-1132-4f10-b871-6c520af4ad1b-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "0cbeca26-1132-4f10-b871-6c520af4ad1b" (UID: "0cbeca26-1132-4f10-b871-6c520af4ad1b"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:27:09 crc kubenswrapper[4921]: I1210 13:27:09.289262 4921 reconciler_common.go:293] "Volume detached for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/0cbeca26-1132-4f10-b871-6c520af4ad1b-inventory-0\") on node \"crc\" DevicePath \"\"" Dec 10 13:27:09 crc kubenswrapper[4921]: I1210 13:27:09.289591 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lt565\" (UniqueName: \"kubernetes.io/projected/0cbeca26-1132-4f10-b871-6c520af4ad1b-kube-api-access-lt565\") on node \"crc\" DevicePath \"\"" Dec 10 13:27:09 crc kubenswrapper[4921]: I1210 13:27:09.289841 4921 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/0cbeca26-1132-4f10-b871-6c520af4ad1b-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Dec 10 13:27:09 crc kubenswrapper[4921]: I1210 13:27:09.704811 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-hcvmh" event={"ID":"0cbeca26-1132-4f10-b871-6c520af4ad1b","Type":"ContainerDied","Data":"18cd0b5056f7ca7ebe891fb43ed4de3949f7412e83a64054d7394aaa1d12b979"} Dec 10 13:27:09 crc kubenswrapper[4921]: I1210 13:27:09.704846 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-hcvmh" Dec 10 13:27:09 crc kubenswrapper[4921]: I1210 13:27:09.704855 4921 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="18cd0b5056f7ca7ebe891fb43ed4de3949f7412e83a64054d7394aaa1d12b979" Dec 10 13:27:09 crc kubenswrapper[4921]: I1210 13:27:09.774654 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-fnlg7"] Dec 10 13:27:09 crc kubenswrapper[4921]: E1210 13:27:09.775010 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0cbeca26-1132-4f10-b871-6c520af4ad1b" containerName="ssh-known-hosts-edpm-deployment" Dec 10 13:27:09 crc kubenswrapper[4921]: I1210 13:27:09.775027 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="0cbeca26-1132-4f10-b871-6c520af4ad1b" containerName="ssh-known-hosts-edpm-deployment" Dec 10 13:27:09 crc kubenswrapper[4921]: I1210 13:27:09.775219 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="0cbeca26-1132-4f10-b871-6c520af4ad1b" containerName="ssh-known-hosts-edpm-deployment" Dec 10 13:27:09 crc kubenswrapper[4921]: I1210 13:27:09.775947 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-fnlg7" Dec 10 13:27:09 crc kubenswrapper[4921]: I1210 13:27:09.778980 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 10 13:27:09 crc kubenswrapper[4921]: I1210 13:27:09.779231 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 10 13:27:09 crc kubenswrapper[4921]: I1210 13:27:09.782225 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 10 13:27:09 crc kubenswrapper[4921]: I1210 13:27:09.782465 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-tgn2z" Dec 10 13:27:09 crc kubenswrapper[4921]: I1210 13:27:09.789581 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-fnlg7"] Dec 10 13:27:09 crc kubenswrapper[4921]: I1210 13:27:09.797915 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/215dabd9-d5f8-4c14-9ea0-cb5a516c25e4-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-fnlg7\" (UID: \"215dabd9-d5f8-4c14-9ea0-cb5a516c25e4\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-fnlg7" Dec 10 13:27:09 crc kubenswrapper[4921]: I1210 13:27:09.798064 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-89pm5\" (UniqueName: \"kubernetes.io/projected/215dabd9-d5f8-4c14-9ea0-cb5a516c25e4-kube-api-access-89pm5\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-fnlg7\" (UID: \"215dabd9-d5f8-4c14-9ea0-cb5a516c25e4\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-fnlg7" Dec 10 13:27:09 crc kubenswrapper[4921]: I1210 13:27:09.798106 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/215dabd9-d5f8-4c14-9ea0-cb5a516c25e4-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-fnlg7\" (UID: \"215dabd9-d5f8-4c14-9ea0-cb5a516c25e4\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-fnlg7" Dec 10 13:27:09 crc kubenswrapper[4921]: I1210 13:27:09.899114 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-89pm5\" (UniqueName: \"kubernetes.io/projected/215dabd9-d5f8-4c14-9ea0-cb5a516c25e4-kube-api-access-89pm5\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-fnlg7\" (UID: \"215dabd9-d5f8-4c14-9ea0-cb5a516c25e4\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-fnlg7" Dec 10 13:27:09 crc kubenswrapper[4921]: I1210 13:27:09.899190 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/215dabd9-d5f8-4c14-9ea0-cb5a516c25e4-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-fnlg7\" (UID: \"215dabd9-d5f8-4c14-9ea0-cb5a516c25e4\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-fnlg7" Dec 10 13:27:09 crc kubenswrapper[4921]: I1210 13:27:09.899233 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/215dabd9-d5f8-4c14-9ea0-cb5a516c25e4-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-fnlg7\" (UID: \"215dabd9-d5f8-4c14-9ea0-cb5a516c25e4\") " 
pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-fnlg7" Dec 10 13:27:09 crc kubenswrapper[4921]: I1210 13:27:09.903563 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/215dabd9-d5f8-4c14-9ea0-cb5a516c25e4-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-fnlg7\" (UID: \"215dabd9-d5f8-4c14-9ea0-cb5a516c25e4\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-fnlg7" Dec 10 13:27:09 crc kubenswrapper[4921]: I1210 13:27:09.911115 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/215dabd9-d5f8-4c14-9ea0-cb5a516c25e4-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-fnlg7\" (UID: \"215dabd9-d5f8-4c14-9ea0-cb5a516c25e4\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-fnlg7" Dec 10 13:27:09 crc kubenswrapper[4921]: I1210 13:27:09.927135 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-89pm5\" (UniqueName: \"kubernetes.io/projected/215dabd9-d5f8-4c14-9ea0-cb5a516c25e4-kube-api-access-89pm5\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-fnlg7\" (UID: \"215dabd9-d5f8-4c14-9ea0-cb5a516c25e4\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-fnlg7" Dec 10 13:27:10 crc kubenswrapper[4921]: I1210 13:27:10.101668 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-fnlg7" Dec 10 13:27:10 crc kubenswrapper[4921]: I1210 13:27:10.677113 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-fnlg7"] Dec 10 13:27:10 crc kubenswrapper[4921]: I1210 13:27:10.715980 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-fnlg7" event={"ID":"215dabd9-d5f8-4c14-9ea0-cb5a516c25e4","Type":"ContainerStarted","Data":"5dce39615609b3d5f07f310fef9b56a09c0d3de783acdd976fd1eaa69e9c7004"} Dec 10 13:27:11 crc kubenswrapper[4921]: I1210 13:27:11.732920 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-fnlg7" event={"ID":"215dabd9-d5f8-4c14-9ea0-cb5a516c25e4","Type":"ContainerStarted","Data":"9a27de99cbf4d7708b7f722bd534804e96991c40238bf5a0e1367f8b32c44db4"} Dec 10 13:27:13 crc kubenswrapper[4921]: I1210 13:27:13.200003 4921 scope.go:117] "RemoveContainer" containerID="0c62b92cda59f27c8c407b11b3375247aad49e7f6c08bd5714c68b8dfabe123c" Dec 10 13:27:13 crc kubenswrapper[4921]: E1210 13:27:13.200645 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-vn2n6_openshift-machine-config-operator(354355f7-6630-49a8-bdc5-5e875feecb7f)\"" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" podUID="354355f7-6630-49a8-bdc5-5e875feecb7f" Dec 10 13:27:19 crc kubenswrapper[4921]: I1210 13:27:19.792716 4921 generic.go:334] "Generic (PLEG): container finished" podID="215dabd9-d5f8-4c14-9ea0-cb5a516c25e4" containerID="9a27de99cbf4d7708b7f722bd534804e96991c40238bf5a0e1367f8b32c44db4" exitCode=0 Dec 10 13:27:19 crc kubenswrapper[4921]: I1210 13:27:19.792757 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-fnlg7" 
event={"ID":"215dabd9-d5f8-4c14-9ea0-cb5a516c25e4","Type":"ContainerDied","Data":"9a27de99cbf4d7708b7f722bd534804e96991c40238bf5a0e1367f8b32c44db4"} Dec 10 13:27:21 crc kubenswrapper[4921]: I1210 13:27:21.221077 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-fnlg7" Dec 10 13:27:21 crc kubenswrapper[4921]: I1210 13:27:21.399430 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-89pm5\" (UniqueName: \"kubernetes.io/projected/215dabd9-d5f8-4c14-9ea0-cb5a516c25e4-kube-api-access-89pm5\") pod \"215dabd9-d5f8-4c14-9ea0-cb5a516c25e4\" (UID: \"215dabd9-d5f8-4c14-9ea0-cb5a516c25e4\") " Dec 10 13:27:21 crc kubenswrapper[4921]: I1210 13:27:21.399699 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/215dabd9-d5f8-4c14-9ea0-cb5a516c25e4-ssh-key\") pod \"215dabd9-d5f8-4c14-9ea0-cb5a516c25e4\" (UID: \"215dabd9-d5f8-4c14-9ea0-cb5a516c25e4\") " Dec 10 13:27:21 crc kubenswrapper[4921]: I1210 13:27:21.399798 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/215dabd9-d5f8-4c14-9ea0-cb5a516c25e4-inventory\") pod \"215dabd9-d5f8-4c14-9ea0-cb5a516c25e4\" (UID: \"215dabd9-d5f8-4c14-9ea0-cb5a516c25e4\") " Dec 10 13:27:21 crc kubenswrapper[4921]: I1210 13:27:21.404710 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/215dabd9-d5f8-4c14-9ea0-cb5a516c25e4-kube-api-access-89pm5" (OuterVolumeSpecName: "kube-api-access-89pm5") pod "215dabd9-d5f8-4c14-9ea0-cb5a516c25e4" (UID: "215dabd9-d5f8-4c14-9ea0-cb5a516c25e4"). InnerVolumeSpecName "kube-api-access-89pm5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 13:27:21 crc kubenswrapper[4921]: I1210 13:27:21.428065 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/215dabd9-d5f8-4c14-9ea0-cb5a516c25e4-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "215dabd9-d5f8-4c14-9ea0-cb5a516c25e4" (UID: "215dabd9-d5f8-4c14-9ea0-cb5a516c25e4"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:27:21 crc kubenswrapper[4921]: I1210 13:27:21.428358 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/215dabd9-d5f8-4c14-9ea0-cb5a516c25e4-inventory" (OuterVolumeSpecName: "inventory") pod "215dabd9-d5f8-4c14-9ea0-cb5a516c25e4" (UID: "215dabd9-d5f8-4c14-9ea0-cb5a516c25e4"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:27:21 crc kubenswrapper[4921]: I1210 13:27:21.502186 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-89pm5\" (UniqueName: \"kubernetes.io/projected/215dabd9-d5f8-4c14-9ea0-cb5a516c25e4-kube-api-access-89pm5\") on node \"crc\" DevicePath \"\"" Dec 10 13:27:21 crc kubenswrapper[4921]: I1210 13:27:21.502218 4921 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/215dabd9-d5f8-4c14-9ea0-cb5a516c25e4-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 10 13:27:21 crc kubenswrapper[4921]: I1210 13:27:21.502226 4921 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/215dabd9-d5f8-4c14-9ea0-cb5a516c25e4-inventory\") on node \"crc\" DevicePath \"\"" Dec 10 13:27:21 crc kubenswrapper[4921]: I1210 13:27:21.809470 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-fnlg7" event={"ID":"215dabd9-d5f8-4c14-9ea0-cb5a516c25e4","Type":"ContainerDied","Data":"5dce39615609b3d5f07f310fef9b56a09c0d3de783acdd976fd1eaa69e9c7004"} Dec 10 13:27:21 crc kubenswrapper[4921]: I1210 13:27:21.809768 4921 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5dce39615609b3d5f07f310fef9b56a09c0d3de783acdd976fd1eaa69e9c7004" Dec 10 13:27:21 crc kubenswrapper[4921]: I1210 13:27:21.809511 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-fnlg7" Dec 10 13:27:21 crc kubenswrapper[4921]: I1210 13:27:21.918732 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-srqdt"] Dec 10 13:27:21 crc kubenswrapper[4921]: E1210 13:27:21.919124 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="215dabd9-d5f8-4c14-9ea0-cb5a516c25e4" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Dec 10 13:27:21 crc kubenswrapper[4921]: I1210 13:27:21.919141 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="215dabd9-d5f8-4c14-9ea0-cb5a516c25e4" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Dec 10 13:27:21 crc kubenswrapper[4921]: I1210 13:27:21.919307 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="215dabd9-d5f8-4c14-9ea0-cb5a516c25e4" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Dec 10 13:27:21 crc kubenswrapper[4921]: I1210 13:27:21.920924 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-srqdt" Dec 10 13:27:21 crc kubenswrapper[4921]: I1210 13:27:21.923438 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 10 13:27:21 crc kubenswrapper[4921]: I1210 13:27:21.924479 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 10 13:27:21 crc kubenswrapper[4921]: I1210 13:27:21.929015 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 10 13:27:21 crc kubenswrapper[4921]: I1210 13:27:21.931857 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-srqdt"] Dec 10 13:27:21 crc kubenswrapper[4921]: I1210 13:27:21.933341 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-tgn2z" Dec 10 13:27:22 crc kubenswrapper[4921]: I1210 13:27:22.111603 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ppxdz\" (UniqueName: \"kubernetes.io/projected/253caf07-9604-44ed-951b-a2546b836755-kube-api-access-ppxdz\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-srqdt\" (UID: \"253caf07-9604-44ed-951b-a2546b836755\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-srqdt" Dec 10 13:27:22 crc kubenswrapper[4921]: I1210 13:27:22.111651 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/253caf07-9604-44ed-951b-a2546b836755-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-srqdt\" (UID: \"253caf07-9604-44ed-951b-a2546b836755\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-srqdt" Dec 10 13:27:22 crc kubenswrapper[4921]: I1210 13:27:22.111785 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/253caf07-9604-44ed-951b-a2546b836755-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-srqdt\" (UID: \"253caf07-9604-44ed-951b-a2546b836755\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-srqdt" Dec 10 13:27:22 crc kubenswrapper[4921]: I1210 13:27:22.213678 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ppxdz\" (UniqueName: \"kubernetes.io/projected/253caf07-9604-44ed-951b-a2546b836755-kube-api-access-ppxdz\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-srqdt\" (UID: \"253caf07-9604-44ed-951b-a2546b836755\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-srqdt" Dec 10 13:27:22 crc kubenswrapper[4921]: I1210 13:27:22.213732 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/253caf07-9604-44ed-951b-a2546b836755-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-srqdt\" (UID: \"253caf07-9604-44ed-951b-a2546b836755\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-srqdt" Dec 10 13:27:22 crc kubenswrapper[4921]: I1210 13:27:22.213831 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/253caf07-9604-44ed-951b-a2546b836755-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-srqdt\" (UID: 
\"253caf07-9604-44ed-951b-a2546b836755\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-srqdt" Dec 10 13:27:22 crc kubenswrapper[4921]: I1210 13:27:22.217895 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/253caf07-9604-44ed-951b-a2546b836755-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-srqdt\" (UID: \"253caf07-9604-44ed-951b-a2546b836755\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-srqdt" Dec 10 13:27:22 crc kubenswrapper[4921]: I1210 13:27:22.223334 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/253caf07-9604-44ed-951b-a2546b836755-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-srqdt\" (UID: \"253caf07-9604-44ed-951b-a2546b836755\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-srqdt" Dec 10 13:27:22 crc kubenswrapper[4921]: I1210 13:27:22.237493 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ppxdz\" (UniqueName: \"kubernetes.io/projected/253caf07-9604-44ed-951b-a2546b836755-kube-api-access-ppxdz\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-srqdt\" (UID: \"253caf07-9604-44ed-951b-a2546b836755\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-srqdt" Dec 10 13:27:22 crc kubenswrapper[4921]: I1210 13:27:22.274414 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-srqdt" Dec 10 13:27:22 crc kubenswrapper[4921]: I1210 13:27:22.806074 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-srqdt"] Dec 10 13:27:22 crc kubenswrapper[4921]: I1210 13:27:22.821572 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-srqdt" event={"ID":"253caf07-9604-44ed-951b-a2546b836755","Type":"ContainerStarted","Data":"0d4c66ecbcacfaa91a8029f595d9524300ad55b53ef386b8280e8f0b7c5e3eba"} Dec 10 13:27:25 crc kubenswrapper[4921]: I1210 13:27:25.846770 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-srqdt" event={"ID":"253caf07-9604-44ed-951b-a2546b836755","Type":"ContainerStarted","Data":"e537b66441c0e1377c1a1eeb53c0ff6cdd4507e5e0a7401eeb1cf47891710781"} Dec 10 13:27:25 crc kubenswrapper[4921]: I1210 13:27:25.870191 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-srqdt" podStartSLOduration=2.115412658 podStartE2EDuration="4.870162697s" podCreationTimestamp="2025-12-10 13:27:21 +0000 UTC" firstStartedPulling="2025-12-10 13:27:22.815421273 +0000 UTC m=+1840.031643197" lastFinishedPulling="2025-12-10 13:27:25.570171312 +0000 UTC m=+1842.786393236" observedRunningTime="2025-12-10 13:27:25.865137222 +0000 UTC m=+1843.081359166" watchObservedRunningTime="2025-12-10 13:27:25.870162697 +0000 UTC m=+1843.086384631" Dec 10 13:27:27 crc kubenswrapper[4921]: I1210 13:27:27.193302 4921 scope.go:117] "RemoveContainer" containerID="0c62b92cda59f27c8c407b11b3375247aad49e7f6c08bd5714c68b8dfabe123c" Dec 10 13:27:27 crc kubenswrapper[4921]: E1210 13:27:27.194246 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-vn2n6_openshift-machine-config-operator(354355f7-6630-49a8-bdc5-5e875feecb7f)\"" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" podUID="354355f7-6630-49a8-bdc5-5e875feecb7f" Dec 10 13:27:35 crc kubenswrapper[4921]: I1210 13:27:35.939803 4921 generic.go:334] "Generic (PLEG): container finished" podID="253caf07-9604-44ed-951b-a2546b836755" containerID="e537b66441c0e1377c1a1eeb53c0ff6cdd4507e5e0a7401eeb1cf47891710781" exitCode=0 Dec 10 13:27:35 crc kubenswrapper[4921]: I1210 13:27:35.939883 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-srqdt" event={"ID":"253caf07-9604-44ed-951b-a2546b836755","Type":"ContainerDied","Data":"e537b66441c0e1377c1a1eeb53c0ff6cdd4507e5e0a7401eeb1cf47891710781"} Dec 10 13:27:37 crc kubenswrapper[4921]: I1210 13:27:37.311944 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-srqdt" Dec 10 13:27:37 crc kubenswrapper[4921]: I1210 13:27:37.342797 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/253caf07-9604-44ed-951b-a2546b836755-inventory\") pod \"253caf07-9604-44ed-951b-a2546b836755\" (UID: \"253caf07-9604-44ed-951b-a2546b836755\") " Dec 10 13:27:37 crc kubenswrapper[4921]: I1210 13:27:37.342925 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/253caf07-9604-44ed-951b-a2546b836755-ssh-key\") pod \"253caf07-9604-44ed-951b-a2546b836755\" (UID: \"253caf07-9604-44ed-951b-a2546b836755\") " Dec 10 13:27:37 crc kubenswrapper[4921]: I1210 13:27:37.342983 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ppxdz\" (UniqueName: \"kubernetes.io/projected/253caf07-9604-44ed-951b-a2546b836755-kube-api-access-ppxdz\") pod \"253caf07-9604-44ed-951b-a2546b836755\" (UID: \"253caf07-9604-44ed-951b-a2546b836755\") " Dec 10 13:27:37 crc kubenswrapper[4921]: I1210 13:27:37.354161 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/253caf07-9604-44ed-951b-a2546b836755-kube-api-access-ppxdz" (OuterVolumeSpecName: "kube-api-access-ppxdz") pod "253caf07-9604-44ed-951b-a2546b836755" (UID: "253caf07-9604-44ed-951b-a2546b836755"). InnerVolumeSpecName "kube-api-access-ppxdz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 13:27:37 crc kubenswrapper[4921]: I1210 13:27:37.380482 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/253caf07-9604-44ed-951b-a2546b836755-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "253caf07-9604-44ed-951b-a2546b836755" (UID: "253caf07-9604-44ed-951b-a2546b836755"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:27:37 crc kubenswrapper[4921]: I1210 13:27:37.389551 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/253caf07-9604-44ed-951b-a2546b836755-inventory" (OuterVolumeSpecName: "inventory") pod "253caf07-9604-44ed-951b-a2546b836755" (UID: "253caf07-9604-44ed-951b-a2546b836755"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:27:37 crc kubenswrapper[4921]: I1210 13:27:37.444610 4921 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/253caf07-9604-44ed-951b-a2546b836755-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 10 13:27:37 crc kubenswrapper[4921]: I1210 13:27:37.444646 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ppxdz\" (UniqueName: \"kubernetes.io/projected/253caf07-9604-44ed-951b-a2546b836755-kube-api-access-ppxdz\") on node \"crc\" DevicePath \"\"" Dec 10 13:27:37 crc kubenswrapper[4921]: I1210 13:27:37.444663 4921 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/253caf07-9604-44ed-951b-a2546b836755-inventory\") on node \"crc\" DevicePath \"\"" Dec 10 13:27:37 crc kubenswrapper[4921]: I1210 13:27:37.961001 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-srqdt" event={"ID":"253caf07-9604-44ed-951b-a2546b836755","Type":"ContainerDied","Data":"0d4c66ecbcacfaa91a8029f595d9524300ad55b53ef386b8280e8f0b7c5e3eba"} Dec 10 13:27:37 crc kubenswrapper[4921]: I1210 13:27:37.961051 4921 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0d4c66ecbcacfaa91a8029f595d9524300ad55b53ef386b8280e8f0b7c5e3eba" Dec 10 13:27:37 crc kubenswrapper[4921]: I1210 13:27:37.961104 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-srqdt" Dec 10 13:27:40 crc kubenswrapper[4921]: I1210 13:27:40.870245 4921 scope.go:117] "RemoveContainer" containerID="0c62b92cda59f27c8c407b11b3375247aad49e7f6c08bd5714c68b8dfabe123c" Dec 10 13:27:40 crc kubenswrapper[4921]: E1210 13:27:40.873546 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-vn2n6_openshift-machine-config-operator(354355f7-6630-49a8-bdc5-5e875feecb7f)\"" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" podUID="354355f7-6630-49a8-bdc5-5e875feecb7f" Dec 10 13:27:46 crc kubenswrapper[4921]: I1210 13:27:46.426695 4921 scope.go:117] "RemoveContainer" containerID="eb90798c4a10f8296339c8c963a4a07c8ea2bd08b1a4a6006dd9bdd38041087f" Dec 10 13:27:53 crc kubenswrapper[4921]: I1210 13:27:53.202770 4921 scope.go:117] "RemoveContainer" containerID="0c62b92cda59f27c8c407b11b3375247aad49e7f6c08bd5714c68b8dfabe123c" Dec 10 13:27:53 crc kubenswrapper[4921]: I1210 13:27:53.988632 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" event={"ID":"354355f7-6630-49a8-bdc5-5e875feecb7f","Type":"ContainerStarted","Data":"c43fd6b298db60ab5b71d2d16349d66c9cda54d132fbb8a931cadf7647f498e4"} Dec 10 13:30:00 crc kubenswrapper[4921]: I1210 13:30:00.151934 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29422890-xwsmd"] Dec 10 13:30:00 crc kubenswrapper[4921]: E1210 13:30:00.152922 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="253caf07-9604-44ed-951b-a2546b836755" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Dec 10 13:30:00 crc kubenswrapper[4921]: I1210 13:30:00.152941 4921 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="253caf07-9604-44ed-951b-a2546b836755" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Dec 10 13:30:00 crc kubenswrapper[4921]: I1210 13:30:00.153131 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="253caf07-9604-44ed-951b-a2546b836755" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Dec 10 13:30:00 crc kubenswrapper[4921]: I1210 13:30:00.154009 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29422890-xwsmd" Dec 10 13:30:00 crc kubenswrapper[4921]: I1210 13:30:00.156046 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 10 13:30:00 crc kubenswrapper[4921]: I1210 13:30:00.161547 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 10 13:30:00 crc kubenswrapper[4921]: I1210 13:30:00.176017 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29422890-xwsmd"] Dec 10 13:30:00 crc kubenswrapper[4921]: I1210 13:30:00.305685 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d6pqc\" (UniqueName: \"kubernetes.io/projected/6793b456-539c-4ce8-a3e6-b717e7702693-kube-api-access-d6pqc\") pod \"collect-profiles-29422890-xwsmd\" (UID: \"6793b456-539c-4ce8-a3e6-b717e7702693\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29422890-xwsmd" Dec 10 13:30:00 crc kubenswrapper[4921]: I1210 13:30:00.305756 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6793b456-539c-4ce8-a3e6-b717e7702693-secret-volume\") pod \"collect-profiles-29422890-xwsmd\" (UID: \"6793b456-539c-4ce8-a3e6-b717e7702693\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29422890-xwsmd" Dec 10 13:30:00 crc kubenswrapper[4921]: I1210 13:30:00.305781 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6793b456-539c-4ce8-a3e6-b717e7702693-config-volume\") pod \"collect-profiles-29422890-xwsmd\" (UID: \"6793b456-539c-4ce8-a3e6-b717e7702693\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29422890-xwsmd" Dec 10 13:30:00 crc kubenswrapper[4921]: I1210 13:30:00.423554 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d6pqc\" (UniqueName: \"kubernetes.io/projected/6793b456-539c-4ce8-a3e6-b717e7702693-kube-api-access-d6pqc\") pod \"collect-profiles-29422890-xwsmd\" (UID: \"6793b456-539c-4ce8-a3e6-b717e7702693\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29422890-xwsmd" Dec 10 13:30:00 crc kubenswrapper[4921]: I1210 13:30:00.423650 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6793b456-539c-4ce8-a3e6-b717e7702693-secret-volume\") pod \"collect-profiles-29422890-xwsmd\" (UID: \"6793b456-539c-4ce8-a3e6-b717e7702693\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29422890-xwsmd" Dec 10 13:30:00 crc kubenswrapper[4921]: I1210 13:30:00.423686 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: 
\"kubernetes.io/configmap/6793b456-539c-4ce8-a3e6-b717e7702693-config-volume\") pod \"collect-profiles-29422890-xwsmd\" (UID: \"6793b456-539c-4ce8-a3e6-b717e7702693\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29422890-xwsmd" Dec 10 13:30:00 crc kubenswrapper[4921]: I1210 13:30:00.424956 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6793b456-539c-4ce8-a3e6-b717e7702693-config-volume\") pod \"collect-profiles-29422890-xwsmd\" (UID: \"6793b456-539c-4ce8-a3e6-b717e7702693\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29422890-xwsmd" Dec 10 13:30:00 crc kubenswrapper[4921]: I1210 13:30:00.442027 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6793b456-539c-4ce8-a3e6-b717e7702693-secret-volume\") pod \"collect-profiles-29422890-xwsmd\" (UID: \"6793b456-539c-4ce8-a3e6-b717e7702693\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29422890-xwsmd" Dec 10 13:30:00 crc kubenswrapper[4921]: I1210 13:30:00.446945 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d6pqc\" (UniqueName: \"kubernetes.io/projected/6793b456-539c-4ce8-a3e6-b717e7702693-kube-api-access-d6pqc\") pod \"collect-profiles-29422890-xwsmd\" (UID: \"6793b456-539c-4ce8-a3e6-b717e7702693\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29422890-xwsmd" Dec 10 13:30:00 crc kubenswrapper[4921]: I1210 13:30:00.471890 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29422890-xwsmd" Dec 10 13:30:00 crc kubenswrapper[4921]: W1210 13:30:00.941251 4921 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6793b456_539c_4ce8_a3e6_b717e7702693.slice/crio-c48b9a792736cd58eb42d7b691547692f53cab454e843e0bfe2f80276e416f21 WatchSource:0}: Error finding container c48b9a792736cd58eb42d7b691547692f53cab454e843e0bfe2f80276e416f21: Status 404 returned error can't find the container with id c48b9a792736cd58eb42d7b691547692f53cab454e843e0bfe2f80276e416f21 Dec 10 13:30:00 crc kubenswrapper[4921]: I1210 13:30:00.944365 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29422890-xwsmd"] Dec 10 13:30:01 crc kubenswrapper[4921]: I1210 13:30:01.145243 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29422890-xwsmd" event={"ID":"6793b456-539c-4ce8-a3e6-b717e7702693","Type":"ContainerStarted","Data":"c48b9a792736cd58eb42d7b691547692f53cab454e843e0bfe2f80276e416f21"} Dec 10 13:30:02 crc kubenswrapper[4921]: I1210 13:30:02.155356 4921 generic.go:334] "Generic (PLEG): container finished" podID="6793b456-539c-4ce8-a3e6-b717e7702693" containerID="316886961f7c0f19edf4b937383faf505960ce3d64a1a927523c6e0e7d557830" exitCode=0 Dec 10 13:30:02 crc kubenswrapper[4921]: I1210 13:30:02.155413 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29422890-xwsmd" event={"ID":"6793b456-539c-4ce8-a3e6-b717e7702693","Type":"ContainerDied","Data":"316886961f7c0f19edf4b937383faf505960ce3d64a1a927523c6e0e7d557830"} Dec 10 13:30:03 crc kubenswrapper[4921]: I1210 13:30:03.528014 4921 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29422890-xwsmd" Dec 10 13:30:03 crc kubenswrapper[4921]: I1210 13:30:03.679621 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6793b456-539c-4ce8-a3e6-b717e7702693-config-volume\") pod \"6793b456-539c-4ce8-a3e6-b717e7702693\" (UID: \"6793b456-539c-4ce8-a3e6-b717e7702693\") " Dec 10 13:30:03 crc kubenswrapper[4921]: I1210 13:30:03.680001 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6793b456-539c-4ce8-a3e6-b717e7702693-secret-volume\") pod \"6793b456-539c-4ce8-a3e6-b717e7702693\" (UID: \"6793b456-539c-4ce8-a3e6-b717e7702693\") " Dec 10 13:30:03 crc kubenswrapper[4921]: I1210 13:30:03.680096 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6pqc\" (UniqueName: \"kubernetes.io/projected/6793b456-539c-4ce8-a3e6-b717e7702693-kube-api-access-d6pqc\") pod \"6793b456-539c-4ce8-a3e6-b717e7702693\" (UID: \"6793b456-539c-4ce8-a3e6-b717e7702693\") " Dec 10 13:30:03 crc kubenswrapper[4921]: I1210 13:30:03.680585 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6793b456-539c-4ce8-a3e6-b717e7702693-config-volume" (OuterVolumeSpecName: "config-volume") pod "6793b456-539c-4ce8-a3e6-b717e7702693" (UID: "6793b456-539c-4ce8-a3e6-b717e7702693"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 13:30:03 crc kubenswrapper[4921]: I1210 13:30:03.685332 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6793b456-539c-4ce8-a3e6-b717e7702693-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "6793b456-539c-4ce8-a3e6-b717e7702693" (UID: "6793b456-539c-4ce8-a3e6-b717e7702693"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 13:30:03 crc kubenswrapper[4921]: I1210 13:30:03.685670 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6793b456-539c-4ce8-a3e6-b717e7702693-kube-api-access-d6pqc" (OuterVolumeSpecName: "kube-api-access-d6pqc") pod "6793b456-539c-4ce8-a3e6-b717e7702693" (UID: "6793b456-539c-4ce8-a3e6-b717e7702693"). InnerVolumeSpecName "kube-api-access-d6pqc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 13:30:03 crc kubenswrapper[4921]: I1210 13:30:03.782575 4921 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6793b456-539c-4ce8-a3e6-b717e7702693-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 10 13:30:03 crc kubenswrapper[4921]: I1210 13:30:03.782783 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6pqc\" (UniqueName: \"kubernetes.io/projected/6793b456-539c-4ce8-a3e6-b717e7702693-kube-api-access-d6pqc\") on node \"crc\" DevicePath \"\"" Dec 10 13:30:03 crc kubenswrapper[4921]: I1210 13:30:03.782859 4921 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6793b456-539c-4ce8-a3e6-b717e7702693-config-volume\") on node \"crc\" DevicePath \"\"" Dec 10 13:30:04 crc kubenswrapper[4921]: I1210 13:30:04.187730 4921 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29422890-xwsmd" Dec 10 13:30:04 crc kubenswrapper[4921]: I1210 13:30:04.188262 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29422890-xwsmd" event={"ID":"6793b456-539c-4ce8-a3e6-b717e7702693","Type":"ContainerDied","Data":"c48b9a792736cd58eb42d7b691547692f53cab454e843e0bfe2f80276e416f21"} Dec 10 13:30:04 crc kubenswrapper[4921]: I1210 13:30:04.188301 4921 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c48b9a792736cd58eb42d7b691547692f53cab454e843e0bfe2f80276e416f21" Dec 10 13:30:04 crc kubenswrapper[4921]: I1210 13:30:04.596539 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29422845-kdjmt"] Dec 10 13:30:04 crc kubenswrapper[4921]: I1210 13:30:04.603416 4921 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29422845-kdjmt"] Dec 10 13:30:05 crc kubenswrapper[4921]: I1210 13:30:05.205044 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="959e5098-562a-471a-9396-fed74ed113b5" path="/var/lib/kubelet/pods/959e5098-562a-471a-9396-fed74ed113b5/volumes" Dec 10 13:30:16 crc kubenswrapper[4921]: I1210 13:30:16.710455 4921 patch_prober.go:28] interesting pod/machine-config-daemon-vn2n6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 10 13:30:16 crc kubenswrapper[4921]: I1210 13:30:16.710981 4921 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" podUID="354355f7-6630-49a8-bdc5-5e875feecb7f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 10 13:30:46 crc kubenswrapper[4921]: I1210 13:30:46.526889 4921 scope.go:117] "RemoveContainer" containerID="7c7a78489563e20e664463b70c0da0356f4377687c580f0b90944d9ce6ef68a5" Dec 10 13:30:46 crc kubenswrapper[4921]: I1210 13:30:46.711051 4921 patch_prober.go:28] interesting pod/machine-config-daemon-vn2n6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 10 13:30:46 crc kubenswrapper[4921]: I1210 13:30:46.711114 4921 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" podUID="354355f7-6630-49a8-bdc5-5e875feecb7f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 10 13:30:56 crc kubenswrapper[4921]: I1210 13:30:56.686119 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-7wkx6"] Dec 10 13:30:56 crc kubenswrapper[4921]: E1210 13:30:56.686987 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6793b456-539c-4ce8-a3e6-b717e7702693" containerName="collect-profiles" Dec 10 13:30:56 crc kubenswrapper[4921]: I1210 13:30:56.686998 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="6793b456-539c-4ce8-a3e6-b717e7702693" containerName="collect-profiles" Dec 10 13:30:56 
crc kubenswrapper[4921]: I1210 13:30:56.687221 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="6793b456-539c-4ce8-a3e6-b717e7702693" containerName="collect-profiles" Dec 10 13:30:56 crc kubenswrapper[4921]: I1210 13:30:56.688379 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-7wkx6" Dec 10 13:30:56 crc kubenswrapper[4921]: I1210 13:30:56.705048 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-7wkx6"] Dec 10 13:30:56 crc kubenswrapper[4921]: I1210 13:30:56.771466 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nvrp5\" (UniqueName: \"kubernetes.io/projected/bb43344d-6c3e-446e-aff4-1cd3760d392b-kube-api-access-nvrp5\") pod \"redhat-operators-7wkx6\" (UID: \"bb43344d-6c3e-446e-aff4-1cd3760d392b\") " pod="openshift-marketplace/redhat-operators-7wkx6" Dec 10 13:30:56 crc kubenswrapper[4921]: I1210 13:30:56.771837 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bb43344d-6c3e-446e-aff4-1cd3760d392b-utilities\") pod \"redhat-operators-7wkx6\" (UID: \"bb43344d-6c3e-446e-aff4-1cd3760d392b\") " pod="openshift-marketplace/redhat-operators-7wkx6" Dec 10 13:30:56 crc kubenswrapper[4921]: I1210 13:30:56.772102 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bb43344d-6c3e-446e-aff4-1cd3760d392b-catalog-content\") pod \"redhat-operators-7wkx6\" (UID: \"bb43344d-6c3e-446e-aff4-1cd3760d392b\") " pod="openshift-marketplace/redhat-operators-7wkx6" Dec 10 13:30:56 crc kubenswrapper[4921]: I1210 13:30:56.873639 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bb43344d-6c3e-446e-aff4-1cd3760d392b-catalog-content\") pod \"redhat-operators-7wkx6\" (UID: \"bb43344d-6c3e-446e-aff4-1cd3760d392b\") " pod="openshift-marketplace/redhat-operators-7wkx6" Dec 10 13:30:56 crc kubenswrapper[4921]: I1210 13:30:56.873794 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nvrp5\" (UniqueName: \"kubernetes.io/projected/bb43344d-6c3e-446e-aff4-1cd3760d392b-kube-api-access-nvrp5\") pod \"redhat-operators-7wkx6\" (UID: \"bb43344d-6c3e-446e-aff4-1cd3760d392b\") " pod="openshift-marketplace/redhat-operators-7wkx6" Dec 10 13:30:56 crc kubenswrapper[4921]: I1210 13:30:56.873852 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bb43344d-6c3e-446e-aff4-1cd3760d392b-utilities\") pod \"redhat-operators-7wkx6\" (UID: \"bb43344d-6c3e-446e-aff4-1cd3760d392b\") " pod="openshift-marketplace/redhat-operators-7wkx6" Dec 10 13:30:56 crc kubenswrapper[4921]: I1210 13:30:56.874113 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bb43344d-6c3e-446e-aff4-1cd3760d392b-catalog-content\") pod \"redhat-operators-7wkx6\" (UID: \"bb43344d-6c3e-446e-aff4-1cd3760d392b\") " pod="openshift-marketplace/redhat-operators-7wkx6" Dec 10 13:30:56 crc kubenswrapper[4921]: I1210 13:30:56.874336 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/bb43344d-6c3e-446e-aff4-1cd3760d392b-utilities\") pod \"redhat-operators-7wkx6\" (UID: \"bb43344d-6c3e-446e-aff4-1cd3760d392b\") " pod="openshift-marketplace/redhat-operators-7wkx6" Dec 10 13:30:56 crc kubenswrapper[4921]: I1210 13:30:56.897253 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nvrp5\" (UniqueName: \"kubernetes.io/projected/bb43344d-6c3e-446e-aff4-1cd3760d392b-kube-api-access-nvrp5\") pod \"redhat-operators-7wkx6\" (UID: \"bb43344d-6c3e-446e-aff4-1cd3760d392b\") " pod="openshift-marketplace/redhat-operators-7wkx6" Dec 10 13:30:57 crc kubenswrapper[4921]: I1210 13:30:57.007945 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-7wkx6" Dec 10 13:30:57 crc kubenswrapper[4921]: I1210 13:30:57.525793 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-7wkx6"] Dec 10 13:30:57 crc kubenswrapper[4921]: I1210 13:30:57.689017 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7wkx6" event={"ID":"bb43344d-6c3e-446e-aff4-1cd3760d392b","Type":"ContainerStarted","Data":"83904760e0a9d42390c3a810fa17357bd5fa6b82d39f96e0ccfe3eb398fbc206"} Dec 10 13:30:58 crc kubenswrapper[4921]: I1210 13:30:58.697628 4921 generic.go:334] "Generic (PLEG): container finished" podID="bb43344d-6c3e-446e-aff4-1cd3760d392b" containerID="4f8d1f26ce0950ea3cb4e04ef6b932f7e49da9efd37926b696abde32c0ea6a74" exitCode=0 Dec 10 13:30:58 crc kubenswrapper[4921]: I1210 13:30:58.697712 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7wkx6" event={"ID":"bb43344d-6c3e-446e-aff4-1cd3760d392b","Type":"ContainerDied","Data":"4f8d1f26ce0950ea3cb4e04ef6b932f7e49da9efd37926b696abde32c0ea6a74"} Dec 10 13:31:00 crc kubenswrapper[4921]: I1210 13:31:00.718513 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7wkx6" event={"ID":"bb43344d-6c3e-446e-aff4-1cd3760d392b","Type":"ContainerStarted","Data":"67e684c2ec40a01f42e6e550ca03a153dce07941b72071dd478c70869baf5686"} Dec 10 13:31:03 crc kubenswrapper[4921]: I1210 13:31:03.742718 4921 generic.go:334] "Generic (PLEG): container finished" podID="bb43344d-6c3e-446e-aff4-1cd3760d392b" containerID="67e684c2ec40a01f42e6e550ca03a153dce07941b72071dd478c70869baf5686" exitCode=0 Dec 10 13:31:03 crc kubenswrapper[4921]: I1210 13:31:03.742812 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7wkx6" event={"ID":"bb43344d-6c3e-446e-aff4-1cd3760d392b","Type":"ContainerDied","Data":"67e684c2ec40a01f42e6e550ca03a153dce07941b72071dd478c70869baf5686"} Dec 10 13:31:05 crc kubenswrapper[4921]: I1210 13:31:05.764701 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7wkx6" event={"ID":"bb43344d-6c3e-446e-aff4-1cd3760d392b","Type":"ContainerStarted","Data":"0e206c00c004c9eb910499ffeeb1f78ec0f4c9aa11db2098cbfcffcc4269524c"} Dec 10 13:31:05 crc kubenswrapper[4921]: I1210 13:31:05.787499 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-7wkx6" podStartSLOduration=3.311563978 podStartE2EDuration="9.787477628s" podCreationTimestamp="2025-12-10 13:30:56 +0000 UTC" firstStartedPulling="2025-12-10 13:30:58.699366773 +0000 UTC m=+2055.915588697" lastFinishedPulling="2025-12-10 13:31:05.175280423 +0000 UTC m=+2062.391502347" 
observedRunningTime="2025-12-10 13:31:05.784452147 +0000 UTC m=+2063.000674081" watchObservedRunningTime="2025-12-10 13:31:05.787477628 +0000 UTC m=+2063.003699542" Dec 10 13:31:07 crc kubenswrapper[4921]: I1210 13:31:07.008552 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-7wkx6" Dec 10 13:31:07 crc kubenswrapper[4921]: I1210 13:31:07.008860 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-7wkx6" Dec 10 13:31:08 crc kubenswrapper[4921]: I1210 13:31:08.057803 4921 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-7wkx6" podUID="bb43344d-6c3e-446e-aff4-1cd3760d392b" containerName="registry-server" probeResult="failure" output=< Dec 10 13:31:08 crc kubenswrapper[4921]: timeout: failed to connect service ":50051" within 1s Dec 10 13:31:08 crc kubenswrapper[4921]: > Dec 10 13:31:16 crc kubenswrapper[4921]: I1210 13:31:16.710868 4921 patch_prober.go:28] interesting pod/machine-config-daemon-vn2n6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 10 13:31:16 crc kubenswrapper[4921]: I1210 13:31:16.711430 4921 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" podUID="354355f7-6630-49a8-bdc5-5e875feecb7f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 10 13:31:16 crc kubenswrapper[4921]: I1210 13:31:16.711479 4921 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" Dec 10 13:31:16 crc kubenswrapper[4921]: I1210 13:31:16.712141 4921 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"c43fd6b298db60ab5b71d2d16349d66c9cda54d132fbb8a931cadf7647f498e4"} pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 10 13:31:16 crc kubenswrapper[4921]: I1210 13:31:16.712191 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" podUID="354355f7-6630-49a8-bdc5-5e875feecb7f" containerName="machine-config-daemon" containerID="cri-o://c43fd6b298db60ab5b71d2d16349d66c9cda54d132fbb8a931cadf7647f498e4" gracePeriod=600 Dec 10 13:31:16 crc kubenswrapper[4921]: I1210 13:31:16.855971 4921 generic.go:334] "Generic (PLEG): container finished" podID="354355f7-6630-49a8-bdc5-5e875feecb7f" containerID="c43fd6b298db60ab5b71d2d16349d66c9cda54d132fbb8a931cadf7647f498e4" exitCode=0 Dec 10 13:31:16 crc kubenswrapper[4921]: I1210 13:31:16.856026 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" event={"ID":"354355f7-6630-49a8-bdc5-5e875feecb7f","Type":"ContainerDied","Data":"c43fd6b298db60ab5b71d2d16349d66c9cda54d132fbb8a931cadf7647f498e4"} Dec 10 13:31:16 crc kubenswrapper[4921]: I1210 13:31:16.856071 4921 scope.go:117] "RemoveContainer" containerID="0c62b92cda59f27c8c407b11b3375247aad49e7f6c08bd5714c68b8dfabe123c" Dec 10 13:31:17 crc kubenswrapper[4921]: I1210 
13:31:17.071105 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-7wkx6" Dec 10 13:31:17 crc kubenswrapper[4921]: I1210 13:31:17.134267 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-7wkx6" Dec 10 13:31:17 crc kubenswrapper[4921]: I1210 13:31:17.312967 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-7wkx6"] Dec 10 13:31:17 crc kubenswrapper[4921]: I1210 13:31:17.866149 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" event={"ID":"354355f7-6630-49a8-bdc5-5e875feecb7f","Type":"ContainerStarted","Data":"6faab8bcc61171add5c6212849f3f417990397c571c057a5639e195842f4d47e"} Dec 10 13:31:18 crc kubenswrapper[4921]: I1210 13:31:18.874293 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-7wkx6" podUID="bb43344d-6c3e-446e-aff4-1cd3760d392b" containerName="registry-server" containerID="cri-o://0e206c00c004c9eb910499ffeeb1f78ec0f4c9aa11db2098cbfcffcc4269524c" gracePeriod=2 Dec 10 13:31:19 crc kubenswrapper[4921]: I1210 13:31:19.884660 4921 generic.go:334] "Generic (PLEG): container finished" podID="bb43344d-6c3e-446e-aff4-1cd3760d392b" containerID="0e206c00c004c9eb910499ffeeb1f78ec0f4c9aa11db2098cbfcffcc4269524c" exitCode=0 Dec 10 13:31:19 crc kubenswrapper[4921]: I1210 13:31:19.884706 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7wkx6" event={"ID":"bb43344d-6c3e-446e-aff4-1cd3760d392b","Type":"ContainerDied","Data":"0e206c00c004c9eb910499ffeeb1f78ec0f4c9aa11db2098cbfcffcc4269524c"} Dec 10 13:31:21 crc kubenswrapper[4921]: I1210 13:31:21.575745 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-7wkx6" Dec 10 13:31:21 crc kubenswrapper[4921]: I1210 13:31:21.641954 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bb43344d-6c3e-446e-aff4-1cd3760d392b-catalog-content\") pod \"bb43344d-6c3e-446e-aff4-1cd3760d392b\" (UID: \"bb43344d-6c3e-446e-aff4-1cd3760d392b\") " Dec 10 13:31:21 crc kubenswrapper[4921]: I1210 13:31:21.642042 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nvrp5\" (UniqueName: \"kubernetes.io/projected/bb43344d-6c3e-446e-aff4-1cd3760d392b-kube-api-access-nvrp5\") pod \"bb43344d-6c3e-446e-aff4-1cd3760d392b\" (UID: \"bb43344d-6c3e-446e-aff4-1cd3760d392b\") " Dec 10 13:31:21 crc kubenswrapper[4921]: I1210 13:31:21.642187 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bb43344d-6c3e-446e-aff4-1cd3760d392b-utilities\") pod \"bb43344d-6c3e-446e-aff4-1cd3760d392b\" (UID: \"bb43344d-6c3e-446e-aff4-1cd3760d392b\") " Dec 10 13:31:21 crc kubenswrapper[4921]: I1210 13:31:21.643696 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bb43344d-6c3e-446e-aff4-1cd3760d392b-utilities" (OuterVolumeSpecName: "utilities") pod "bb43344d-6c3e-446e-aff4-1cd3760d392b" (UID: "bb43344d-6c3e-446e-aff4-1cd3760d392b"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 13:31:21 crc kubenswrapper[4921]: I1210 13:31:21.651286 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bb43344d-6c3e-446e-aff4-1cd3760d392b-kube-api-access-nvrp5" (OuterVolumeSpecName: "kube-api-access-nvrp5") pod "bb43344d-6c3e-446e-aff4-1cd3760d392b" (UID: "bb43344d-6c3e-446e-aff4-1cd3760d392b"). InnerVolumeSpecName "kube-api-access-nvrp5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 13:31:21 crc kubenswrapper[4921]: I1210 13:31:21.744101 4921 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bb43344d-6c3e-446e-aff4-1cd3760d392b-utilities\") on node \"crc\" DevicePath \"\"" Dec 10 13:31:21 crc kubenswrapper[4921]: I1210 13:31:21.744137 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nvrp5\" (UniqueName: \"kubernetes.io/projected/bb43344d-6c3e-446e-aff4-1cd3760d392b-kube-api-access-nvrp5\") on node \"crc\" DevicePath \"\"" Dec 10 13:31:21 crc kubenswrapper[4921]: I1210 13:31:21.768680 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bb43344d-6c3e-446e-aff4-1cd3760d392b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "bb43344d-6c3e-446e-aff4-1cd3760d392b" (UID: "bb43344d-6c3e-446e-aff4-1cd3760d392b"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 13:31:21 crc kubenswrapper[4921]: I1210 13:31:21.846285 4921 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bb43344d-6c3e-446e-aff4-1cd3760d392b-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 10 13:31:21 crc kubenswrapper[4921]: I1210 13:31:21.905575 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7wkx6" event={"ID":"bb43344d-6c3e-446e-aff4-1cd3760d392b","Type":"ContainerDied","Data":"83904760e0a9d42390c3a810fa17357bd5fa6b82d39f96e0ccfe3eb398fbc206"} Dec 10 13:31:21 crc kubenswrapper[4921]: I1210 13:31:21.905635 4921 scope.go:117] "RemoveContainer" containerID="0e206c00c004c9eb910499ffeeb1f78ec0f4c9aa11db2098cbfcffcc4269524c" Dec 10 13:31:21 crc kubenswrapper[4921]: I1210 13:31:21.905649 4921 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-7wkx6" Dec 10 13:31:21 crc kubenswrapper[4921]: I1210 13:31:21.936465 4921 scope.go:117] "RemoveContainer" containerID="67e684c2ec40a01f42e6e550ca03a153dce07941b72071dd478c70869baf5686" Dec 10 13:31:21 crc kubenswrapper[4921]: I1210 13:31:21.943750 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-7wkx6"] Dec 10 13:31:21 crc kubenswrapper[4921]: I1210 13:31:21.952806 4921 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-7wkx6"] Dec 10 13:31:21 crc kubenswrapper[4921]: I1210 13:31:21.970825 4921 scope.go:117] "RemoveContainer" containerID="4f8d1f26ce0950ea3cb4e04ef6b932f7e49da9efd37926b696abde32c0ea6a74" Dec 10 13:31:23 crc kubenswrapper[4921]: I1210 13:31:23.202054 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bb43344d-6c3e-446e-aff4-1cd3760d392b" path="/var/lib/kubelet/pods/bb43344d-6c3e-446e-aff4-1cd3760d392b/volumes" Dec 10 13:32:46 crc kubenswrapper[4921]: I1210 13:32:46.380125 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-2zhq8"] Dec 10 13:32:46 crc kubenswrapper[4921]: E1210 13:32:46.381042 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bb43344d-6c3e-446e-aff4-1cd3760d392b" containerName="extract-content" Dec 10 13:32:46 crc kubenswrapper[4921]: I1210 13:32:46.381053 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="bb43344d-6c3e-446e-aff4-1cd3760d392b" containerName="extract-content" Dec 10 13:32:46 crc kubenswrapper[4921]: E1210 13:32:46.381066 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bb43344d-6c3e-446e-aff4-1cd3760d392b" containerName="extract-utilities" Dec 10 13:32:46 crc kubenswrapper[4921]: I1210 13:32:46.381072 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="bb43344d-6c3e-446e-aff4-1cd3760d392b" containerName="extract-utilities" Dec 10 13:32:46 crc kubenswrapper[4921]: E1210 13:32:46.381095 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bb43344d-6c3e-446e-aff4-1cd3760d392b" containerName="registry-server" Dec 10 13:32:46 crc kubenswrapper[4921]: I1210 13:32:46.381102 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="bb43344d-6c3e-446e-aff4-1cd3760d392b" containerName="registry-server" Dec 10 13:32:46 crc kubenswrapper[4921]: I1210 13:32:46.381284 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="bb43344d-6c3e-446e-aff4-1cd3760d392b" containerName="registry-server" Dec 10 13:32:46 crc kubenswrapper[4921]: I1210 13:32:46.407152 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-2zhq8"] Dec 10 13:32:46 crc kubenswrapper[4921]: I1210 13:32:46.407360 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-2zhq8" Dec 10 13:32:46 crc kubenswrapper[4921]: I1210 13:32:46.526757 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2bkgz\" (UniqueName: \"kubernetes.io/projected/b7920f09-f65c-42f5-8009-6f82728acd77-kube-api-access-2bkgz\") pod \"certified-operators-2zhq8\" (UID: \"b7920f09-f65c-42f5-8009-6f82728acd77\") " pod="openshift-marketplace/certified-operators-2zhq8" Dec 10 13:32:46 crc kubenswrapper[4921]: I1210 13:32:46.526993 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b7920f09-f65c-42f5-8009-6f82728acd77-utilities\") pod \"certified-operators-2zhq8\" (UID: \"b7920f09-f65c-42f5-8009-6f82728acd77\") " pod="openshift-marketplace/certified-operators-2zhq8" Dec 10 13:32:46 crc kubenswrapper[4921]: I1210 13:32:46.527097 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b7920f09-f65c-42f5-8009-6f82728acd77-catalog-content\") pod \"certified-operators-2zhq8\" (UID: \"b7920f09-f65c-42f5-8009-6f82728acd77\") " pod="openshift-marketplace/certified-operators-2zhq8" Dec 10 13:32:46 crc kubenswrapper[4921]: I1210 13:32:46.629258 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b7920f09-f65c-42f5-8009-6f82728acd77-utilities\") pod \"certified-operators-2zhq8\" (UID: \"b7920f09-f65c-42f5-8009-6f82728acd77\") " pod="openshift-marketplace/certified-operators-2zhq8" Dec 10 13:32:46 crc kubenswrapper[4921]: I1210 13:32:46.629336 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b7920f09-f65c-42f5-8009-6f82728acd77-catalog-content\") pod \"certified-operators-2zhq8\" (UID: \"b7920f09-f65c-42f5-8009-6f82728acd77\") " pod="openshift-marketplace/certified-operators-2zhq8" Dec 10 13:32:46 crc kubenswrapper[4921]: I1210 13:32:46.629460 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2bkgz\" (UniqueName: \"kubernetes.io/projected/b7920f09-f65c-42f5-8009-6f82728acd77-kube-api-access-2bkgz\") pod \"certified-operators-2zhq8\" (UID: \"b7920f09-f65c-42f5-8009-6f82728acd77\") " pod="openshift-marketplace/certified-operators-2zhq8" Dec 10 13:32:46 crc kubenswrapper[4921]: I1210 13:32:46.630260 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b7920f09-f65c-42f5-8009-6f82728acd77-utilities\") pod \"certified-operators-2zhq8\" (UID: \"b7920f09-f65c-42f5-8009-6f82728acd77\") " pod="openshift-marketplace/certified-operators-2zhq8" Dec 10 13:32:46 crc kubenswrapper[4921]: I1210 13:32:46.630579 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b7920f09-f65c-42f5-8009-6f82728acd77-catalog-content\") pod \"certified-operators-2zhq8\" (UID: \"b7920f09-f65c-42f5-8009-6f82728acd77\") " pod="openshift-marketplace/certified-operators-2zhq8" Dec 10 13:32:46 crc kubenswrapper[4921]: I1210 13:32:46.650264 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2bkgz\" (UniqueName: \"kubernetes.io/projected/b7920f09-f65c-42f5-8009-6f82728acd77-kube-api-access-2bkgz\") pod 
\"certified-operators-2zhq8\" (UID: \"b7920f09-f65c-42f5-8009-6f82728acd77\") " pod="openshift-marketplace/certified-operators-2zhq8" Dec 10 13:32:46 crc kubenswrapper[4921]: I1210 13:32:46.741873 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-2zhq8" Dec 10 13:32:47 crc kubenswrapper[4921]: I1210 13:32:47.282359 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-2zhq8"] Dec 10 13:32:47 crc kubenswrapper[4921]: I1210 13:32:47.988866 4921 generic.go:334] "Generic (PLEG): container finished" podID="b7920f09-f65c-42f5-8009-6f82728acd77" containerID="2f740b60005f0b79c4f56ec1ef6d4e9eca4f5363c3cf619958dd24d887748212" exitCode=0 Dec 10 13:32:47 crc kubenswrapper[4921]: I1210 13:32:47.989045 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2zhq8" event={"ID":"b7920f09-f65c-42f5-8009-6f82728acd77","Type":"ContainerDied","Data":"2f740b60005f0b79c4f56ec1ef6d4e9eca4f5363c3cf619958dd24d887748212"} Dec 10 13:32:47 crc kubenswrapper[4921]: I1210 13:32:47.989374 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2zhq8" event={"ID":"b7920f09-f65c-42f5-8009-6f82728acd77","Type":"ContainerStarted","Data":"2c4fa74c4c2d4f40a13b0ddceff25c48bb322012eed6a6acd30918894402a44d"} Dec 10 13:32:47 crc kubenswrapper[4921]: I1210 13:32:47.994273 4921 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 10 13:32:48 crc kubenswrapper[4921]: I1210 13:32:48.963314 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-c8qgz"] Dec 10 13:32:48 crc kubenswrapper[4921]: I1210 13:32:48.970064 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-c8qgz" Dec 10 13:32:48 crc kubenswrapper[4921]: I1210 13:32:48.973225 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-c8qgz"] Dec 10 13:32:49 crc kubenswrapper[4921]: I1210 13:32:49.076920 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/915ef55d-c3f8-43f2-a326-f2c7a72ce42c-utilities\") pod \"community-operators-c8qgz\" (UID: \"915ef55d-c3f8-43f2-a326-f2c7a72ce42c\") " pod="openshift-marketplace/community-operators-c8qgz" Dec 10 13:32:49 crc kubenswrapper[4921]: I1210 13:32:49.076981 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/915ef55d-c3f8-43f2-a326-f2c7a72ce42c-catalog-content\") pod \"community-operators-c8qgz\" (UID: \"915ef55d-c3f8-43f2-a326-f2c7a72ce42c\") " pod="openshift-marketplace/community-operators-c8qgz" Dec 10 13:32:49 crc kubenswrapper[4921]: I1210 13:32:49.077099 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-99ph9\" (UniqueName: \"kubernetes.io/projected/915ef55d-c3f8-43f2-a326-f2c7a72ce42c-kube-api-access-99ph9\") pod \"community-operators-c8qgz\" (UID: \"915ef55d-c3f8-43f2-a326-f2c7a72ce42c\") " pod="openshift-marketplace/community-operators-c8qgz" Dec 10 13:32:49 crc kubenswrapper[4921]: I1210 13:32:49.178648 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/915ef55d-c3f8-43f2-a326-f2c7a72ce42c-catalog-content\") pod \"community-operators-c8qgz\" (UID: \"915ef55d-c3f8-43f2-a326-f2c7a72ce42c\") " pod="openshift-marketplace/community-operators-c8qgz" Dec 10 13:32:49 crc kubenswrapper[4921]: I1210 13:32:49.179100 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/915ef55d-c3f8-43f2-a326-f2c7a72ce42c-catalog-content\") pod \"community-operators-c8qgz\" (UID: \"915ef55d-c3f8-43f2-a326-f2c7a72ce42c\") " pod="openshift-marketplace/community-operators-c8qgz" Dec 10 13:32:49 crc kubenswrapper[4921]: I1210 13:32:49.179332 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-99ph9\" (UniqueName: \"kubernetes.io/projected/915ef55d-c3f8-43f2-a326-f2c7a72ce42c-kube-api-access-99ph9\") pod \"community-operators-c8qgz\" (UID: \"915ef55d-c3f8-43f2-a326-f2c7a72ce42c\") " pod="openshift-marketplace/community-operators-c8qgz" Dec 10 13:32:49 crc kubenswrapper[4921]: I1210 13:32:49.179783 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/915ef55d-c3f8-43f2-a326-f2c7a72ce42c-utilities\") pod \"community-operators-c8qgz\" (UID: \"915ef55d-c3f8-43f2-a326-f2c7a72ce42c\") " pod="openshift-marketplace/community-operators-c8qgz" Dec 10 13:32:49 crc kubenswrapper[4921]: I1210 13:32:49.180096 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/915ef55d-c3f8-43f2-a326-f2c7a72ce42c-utilities\") pod \"community-operators-c8qgz\" (UID: \"915ef55d-c3f8-43f2-a326-f2c7a72ce42c\") " pod="openshift-marketplace/community-operators-c8qgz" Dec 10 13:32:49 crc kubenswrapper[4921]: I1210 13:32:49.201951 4921 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-99ph9\" (UniqueName: \"kubernetes.io/projected/915ef55d-c3f8-43f2-a326-f2c7a72ce42c-kube-api-access-99ph9\") pod \"community-operators-c8qgz\" (UID: \"915ef55d-c3f8-43f2-a326-f2c7a72ce42c\") " pod="openshift-marketplace/community-operators-c8qgz" Dec 10 13:32:49 crc kubenswrapper[4921]: I1210 13:32:49.297282 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-c8qgz" Dec 10 13:32:49 crc kubenswrapper[4921]: I1210 13:32:49.980489 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-c8qgz"] Dec 10 13:32:49 crc kubenswrapper[4921]: W1210 13:32:49.993624 4921 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod915ef55d_c3f8_43f2_a326_f2c7a72ce42c.slice/crio-e4f3fbc1e2bf94720d994471396c68941522e88cef82762687b0044ed715e986 WatchSource:0}: Error finding container e4f3fbc1e2bf94720d994471396c68941522e88cef82762687b0044ed715e986: Status 404 returned error can't find the container with id e4f3fbc1e2bf94720d994471396c68941522e88cef82762687b0044ed715e986 Dec 10 13:32:50 crc kubenswrapper[4921]: I1210 13:32:50.012303 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-c8qgz" event={"ID":"915ef55d-c3f8-43f2-a326-f2c7a72ce42c","Type":"ContainerStarted","Data":"e4f3fbc1e2bf94720d994471396c68941522e88cef82762687b0044ed715e986"} Dec 10 13:32:51 crc kubenswrapper[4921]: I1210 13:32:51.023340 4921 generic.go:334] "Generic (PLEG): container finished" podID="915ef55d-c3f8-43f2-a326-f2c7a72ce42c" containerID="4a22e4e92e2fa0e2c74b1031a546b202447d56512502386dad041423a4a5cf60" exitCode=0 Dec 10 13:32:51 crc kubenswrapper[4921]: I1210 13:32:51.023403 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-c8qgz" event={"ID":"915ef55d-c3f8-43f2-a326-f2c7a72ce42c","Type":"ContainerDied","Data":"4a22e4e92e2fa0e2c74b1031a546b202447d56512502386dad041423a4a5cf60"} Dec 10 13:32:55 crc kubenswrapper[4921]: I1210 13:32:55.060064 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-c8qgz" event={"ID":"915ef55d-c3f8-43f2-a326-f2c7a72ce42c","Type":"ContainerStarted","Data":"7cc513eb338e5cd98da0ec8edf91d0f69212d8483a7b68b98baaa24c40e52450"} Dec 10 13:32:56 crc kubenswrapper[4921]: I1210 13:32:56.071582 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2zhq8" event={"ID":"b7920f09-f65c-42f5-8009-6f82728acd77","Type":"ContainerStarted","Data":"59af6b1ed82a0df19ca5a29b9c2425b08c97d303e814bbcc619198691a556ab6"} Dec 10 13:32:59 crc kubenswrapper[4921]: I1210 13:32:59.103255 4921 generic.go:334] "Generic (PLEG): container finished" podID="b7920f09-f65c-42f5-8009-6f82728acd77" containerID="59af6b1ed82a0df19ca5a29b9c2425b08c97d303e814bbcc619198691a556ab6" exitCode=0 Dec 10 13:32:59 crc kubenswrapper[4921]: I1210 13:32:59.103347 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2zhq8" event={"ID":"b7920f09-f65c-42f5-8009-6f82728acd77","Type":"ContainerDied","Data":"59af6b1ed82a0df19ca5a29b9c2425b08c97d303e814bbcc619198691a556ab6"} Dec 10 13:32:59 crc kubenswrapper[4921]: I1210 13:32:59.106980 4921 generic.go:334] "Generic (PLEG): container finished" podID="915ef55d-c3f8-43f2-a326-f2c7a72ce42c" 
containerID="7cc513eb338e5cd98da0ec8edf91d0f69212d8483a7b68b98baaa24c40e52450" exitCode=0 Dec 10 13:32:59 crc kubenswrapper[4921]: I1210 13:32:59.107012 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-c8qgz" event={"ID":"915ef55d-c3f8-43f2-a326-f2c7a72ce42c","Type":"ContainerDied","Data":"7cc513eb338e5cd98da0ec8edf91d0f69212d8483a7b68b98baaa24c40e52450"} Dec 10 13:33:00 crc kubenswrapper[4921]: E1210 13:33:00.425541 4921 kubelet.go:2526] "Housekeeping took longer than expected" err="housekeeping took too long" expected="1s" actual="1.233s" Dec 10 13:33:05 crc kubenswrapper[4921]: I1210 13:33:05.159099 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2zhq8" event={"ID":"b7920f09-f65c-42f5-8009-6f82728acd77","Type":"ContainerStarted","Data":"1ba18e920a5e63e38de00017cdd156cfaf16461d81b95db203092b2a9e81c400"} Dec 10 13:33:05 crc kubenswrapper[4921]: I1210 13:33:05.191065 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-2zhq8" podStartSLOduration=2.67293112 podStartE2EDuration="19.191044039s" podCreationTimestamp="2025-12-10 13:32:46 +0000 UTC" firstStartedPulling="2025-12-10 13:32:47.993960492 +0000 UTC m=+2165.210182416" lastFinishedPulling="2025-12-10 13:33:04.512073411 +0000 UTC m=+2181.728295335" observedRunningTime="2025-12-10 13:33:05.18325204 +0000 UTC m=+2182.399473964" watchObservedRunningTime="2025-12-10 13:33:05.191044039 +0000 UTC m=+2182.407266003" Dec 10 13:33:06 crc kubenswrapper[4921]: I1210 13:33:06.742236 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-2zhq8" Dec 10 13:33:06 crc kubenswrapper[4921]: I1210 13:33:06.742605 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-2zhq8" Dec 10 13:33:06 crc kubenswrapper[4921]: I1210 13:33:06.789454 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-2zhq8" Dec 10 13:33:07 crc kubenswrapper[4921]: I1210 13:33:07.180073 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-c8qgz" event={"ID":"915ef55d-c3f8-43f2-a326-f2c7a72ce42c","Type":"ContainerStarted","Data":"3329cfd30965341742d706a821bff7ff937a264a312d529809fc221c201d9dcb"} Dec 10 13:33:09 crc kubenswrapper[4921]: I1210 13:33:09.298055 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-c8qgz" Dec 10 13:33:09 crc kubenswrapper[4921]: I1210 13:33:09.298532 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-c8qgz" Dec 10 13:33:09 crc kubenswrapper[4921]: I1210 13:33:09.350699 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-c8qgz" Dec 10 13:33:09 crc kubenswrapper[4921]: I1210 13:33:09.382078 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-c8qgz" podStartSLOduration=6.5687043240000005 podStartE2EDuration="21.382060475s" podCreationTimestamp="2025-12-10 13:32:48 +0000 UTC" firstStartedPulling="2025-12-10 13:32:51.024963915 +0000 UTC m=+2168.241185839" lastFinishedPulling="2025-12-10 13:33:05.838320046 +0000 UTC m=+2183.054541990" observedRunningTime="2025-12-10 13:33:07.200783934 
+0000 UTC m=+2184.417005858" watchObservedRunningTime="2025-12-10 13:33:09.382060475 +0000 UTC m=+2186.598282409" Dec 10 13:33:16 crc kubenswrapper[4921]: I1210 13:33:16.787954 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-2zhq8" Dec 10 13:33:16 crc kubenswrapper[4921]: I1210 13:33:16.854063 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-2zhq8"] Dec 10 13:33:16 crc kubenswrapper[4921]: I1210 13:33:16.911048 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-stzwv"] Dec 10 13:33:16 crc kubenswrapper[4921]: I1210 13:33:16.911454 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-stzwv" podUID="80b6c46b-c866-4183-8e59-6f802fdaaa75" containerName="registry-server" containerID="cri-o://7cdbe8d92578a072bd55018d52e6a08cf7a5ca32956245f4d56b344e8941253a" gracePeriod=2 Dec 10 13:33:17 crc kubenswrapper[4921]: I1210 13:33:17.278359 4921 generic.go:334] "Generic (PLEG): container finished" podID="80b6c46b-c866-4183-8e59-6f802fdaaa75" containerID="7cdbe8d92578a072bd55018d52e6a08cf7a5ca32956245f4d56b344e8941253a" exitCode=0 Dec 10 13:33:17 crc kubenswrapper[4921]: I1210 13:33:17.278513 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-stzwv" event={"ID":"80b6c46b-c866-4183-8e59-6f802fdaaa75","Type":"ContainerDied","Data":"7cdbe8d92578a072bd55018d52e6a08cf7a5ca32956245f4d56b344e8941253a"} Dec 10 13:33:17 crc kubenswrapper[4921]: I1210 13:33:17.398278 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-stzwv" Dec 10 13:33:17 crc kubenswrapper[4921]: I1210 13:33:17.536613 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/80b6c46b-c866-4183-8e59-6f802fdaaa75-catalog-content\") pod \"80b6c46b-c866-4183-8e59-6f802fdaaa75\" (UID: \"80b6c46b-c866-4183-8e59-6f802fdaaa75\") " Dec 10 13:33:17 crc kubenswrapper[4921]: I1210 13:33:17.536818 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9rcb5\" (UniqueName: \"kubernetes.io/projected/80b6c46b-c866-4183-8e59-6f802fdaaa75-kube-api-access-9rcb5\") pod \"80b6c46b-c866-4183-8e59-6f802fdaaa75\" (UID: \"80b6c46b-c866-4183-8e59-6f802fdaaa75\") " Dec 10 13:33:17 crc kubenswrapper[4921]: I1210 13:33:17.536855 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/80b6c46b-c866-4183-8e59-6f802fdaaa75-utilities\") pod \"80b6c46b-c866-4183-8e59-6f802fdaaa75\" (UID: \"80b6c46b-c866-4183-8e59-6f802fdaaa75\") " Dec 10 13:33:17 crc kubenswrapper[4921]: I1210 13:33:17.537821 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/80b6c46b-c866-4183-8e59-6f802fdaaa75-utilities" (OuterVolumeSpecName: "utilities") pod "80b6c46b-c866-4183-8e59-6f802fdaaa75" (UID: "80b6c46b-c866-4183-8e59-6f802fdaaa75"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 13:33:17 crc kubenswrapper[4921]: I1210 13:33:17.549686 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/80b6c46b-c866-4183-8e59-6f802fdaaa75-kube-api-access-9rcb5" (OuterVolumeSpecName: "kube-api-access-9rcb5") pod "80b6c46b-c866-4183-8e59-6f802fdaaa75" (UID: "80b6c46b-c866-4183-8e59-6f802fdaaa75"). InnerVolumeSpecName "kube-api-access-9rcb5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 13:33:17 crc kubenswrapper[4921]: I1210 13:33:17.587227 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/80b6c46b-c866-4183-8e59-6f802fdaaa75-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "80b6c46b-c866-4183-8e59-6f802fdaaa75" (UID: "80b6c46b-c866-4183-8e59-6f802fdaaa75"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 13:33:17 crc kubenswrapper[4921]: I1210 13:33:17.639528 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9rcb5\" (UniqueName: \"kubernetes.io/projected/80b6c46b-c866-4183-8e59-6f802fdaaa75-kube-api-access-9rcb5\") on node \"crc\" DevicePath \"\"" Dec 10 13:33:17 crc kubenswrapper[4921]: I1210 13:33:17.639562 4921 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/80b6c46b-c866-4183-8e59-6f802fdaaa75-utilities\") on node \"crc\" DevicePath \"\"" Dec 10 13:33:17 crc kubenswrapper[4921]: I1210 13:33:17.639572 4921 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/80b6c46b-c866-4183-8e59-6f802fdaaa75-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 10 13:33:18 crc kubenswrapper[4921]: I1210 13:33:18.288412 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-stzwv" event={"ID":"80b6c46b-c866-4183-8e59-6f802fdaaa75","Type":"ContainerDied","Data":"88c9444d0318acaa787e0ad0d7efdaed2a8a77ea8014c9ec34445cc908bb6165"} Dec 10 13:33:18 crc kubenswrapper[4921]: I1210 13:33:18.288482 4921 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-stzwv" Dec 10 13:33:18 crc kubenswrapper[4921]: I1210 13:33:18.288489 4921 scope.go:117] "RemoveContainer" containerID="7cdbe8d92578a072bd55018d52e6a08cf7a5ca32956245f4d56b344e8941253a" Dec 10 13:33:18 crc kubenswrapper[4921]: I1210 13:33:18.307339 4921 scope.go:117] "RemoveContainer" containerID="ff04f8214847665c5fc4e9a058dae1170dab08a4433ed060b48724c2c8d19f7d" Dec 10 13:33:18 crc kubenswrapper[4921]: I1210 13:33:18.327079 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-stzwv"] Dec 10 13:33:18 crc kubenswrapper[4921]: I1210 13:33:18.344515 4921 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-stzwv"] Dec 10 13:33:18 crc kubenswrapper[4921]: I1210 13:33:18.347524 4921 scope.go:117] "RemoveContainer" containerID="7e6df91d56a07ee152b89670a221b8ae6f4a5510e92812541d6383d37fab30eb" Dec 10 13:33:19 crc kubenswrapper[4921]: I1210 13:33:19.206118 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="80b6c46b-c866-4183-8e59-6f802fdaaa75" path="/var/lib/kubelet/pods/80b6c46b-c866-4183-8e59-6f802fdaaa75/volumes" Dec 10 13:33:19 crc kubenswrapper[4921]: I1210 13:33:19.343112 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-c8qgz" Dec 10 13:33:20 crc kubenswrapper[4921]: I1210 13:33:20.807840 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-c8qgz"] Dec 10 13:33:20 crc kubenswrapper[4921]: I1210 13:33:20.808088 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-c8qgz" podUID="915ef55d-c3f8-43f2-a326-f2c7a72ce42c" containerName="registry-server" containerID="cri-o://3329cfd30965341742d706a821bff7ff937a264a312d529809fc221c201d9dcb" gracePeriod=2 Dec 10 13:33:21 crc kubenswrapper[4921]: I1210 13:33:21.338075 4921 generic.go:334] "Generic (PLEG): container finished" podID="915ef55d-c3f8-43f2-a326-f2c7a72ce42c" containerID="3329cfd30965341742d706a821bff7ff937a264a312d529809fc221c201d9dcb" exitCode=0 Dec 10 13:33:21 crc kubenswrapper[4921]: I1210 13:33:21.338156 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-c8qgz" event={"ID":"915ef55d-c3f8-43f2-a326-f2c7a72ce42c","Type":"ContainerDied","Data":"3329cfd30965341742d706a821bff7ff937a264a312d529809fc221c201d9dcb"} Dec 10 13:33:21 crc kubenswrapper[4921]: I1210 13:33:21.552918 4921 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-c8qgz" Dec 10 13:33:21 crc kubenswrapper[4921]: I1210 13:33:21.713065 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/915ef55d-c3f8-43f2-a326-f2c7a72ce42c-catalog-content\") pod \"915ef55d-c3f8-43f2-a326-f2c7a72ce42c\" (UID: \"915ef55d-c3f8-43f2-a326-f2c7a72ce42c\") " Dec 10 13:33:21 crc kubenswrapper[4921]: I1210 13:33:21.713138 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/915ef55d-c3f8-43f2-a326-f2c7a72ce42c-utilities\") pod \"915ef55d-c3f8-43f2-a326-f2c7a72ce42c\" (UID: \"915ef55d-c3f8-43f2-a326-f2c7a72ce42c\") " Dec 10 13:33:21 crc kubenswrapper[4921]: I1210 13:33:21.713277 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-99ph9\" (UniqueName: \"kubernetes.io/projected/915ef55d-c3f8-43f2-a326-f2c7a72ce42c-kube-api-access-99ph9\") pod \"915ef55d-c3f8-43f2-a326-f2c7a72ce42c\" (UID: \"915ef55d-c3f8-43f2-a326-f2c7a72ce42c\") " Dec 10 13:33:21 crc kubenswrapper[4921]: I1210 13:33:21.714126 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/915ef55d-c3f8-43f2-a326-f2c7a72ce42c-utilities" (OuterVolumeSpecName: "utilities") pod "915ef55d-c3f8-43f2-a326-f2c7a72ce42c" (UID: "915ef55d-c3f8-43f2-a326-f2c7a72ce42c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 13:33:21 crc kubenswrapper[4921]: I1210 13:33:21.720148 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/915ef55d-c3f8-43f2-a326-f2c7a72ce42c-kube-api-access-99ph9" (OuterVolumeSpecName: "kube-api-access-99ph9") pod "915ef55d-c3f8-43f2-a326-f2c7a72ce42c" (UID: "915ef55d-c3f8-43f2-a326-f2c7a72ce42c"). InnerVolumeSpecName "kube-api-access-99ph9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 13:33:21 crc kubenswrapper[4921]: I1210 13:33:21.770906 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/915ef55d-c3f8-43f2-a326-f2c7a72ce42c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "915ef55d-c3f8-43f2-a326-f2c7a72ce42c" (UID: "915ef55d-c3f8-43f2-a326-f2c7a72ce42c"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 13:33:21 crc kubenswrapper[4921]: I1210 13:33:21.815382 4921 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/915ef55d-c3f8-43f2-a326-f2c7a72ce42c-utilities\") on node \"crc\" DevicePath \"\"" Dec 10 13:33:21 crc kubenswrapper[4921]: I1210 13:33:21.816241 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-99ph9\" (UniqueName: \"kubernetes.io/projected/915ef55d-c3f8-43f2-a326-f2c7a72ce42c-kube-api-access-99ph9\") on node \"crc\" DevicePath \"\"" Dec 10 13:33:21 crc kubenswrapper[4921]: I1210 13:33:21.816303 4921 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/915ef55d-c3f8-43f2-a326-f2c7a72ce42c-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 10 13:33:22 crc kubenswrapper[4921]: I1210 13:33:22.351808 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-c8qgz" event={"ID":"915ef55d-c3f8-43f2-a326-f2c7a72ce42c","Type":"ContainerDied","Data":"e4f3fbc1e2bf94720d994471396c68941522e88cef82762687b0044ed715e986"} Dec 10 13:33:22 crc kubenswrapper[4921]: I1210 13:33:22.352133 4921 scope.go:117] "RemoveContainer" containerID="3329cfd30965341742d706a821bff7ff937a264a312d529809fc221c201d9dcb" Dec 10 13:33:22 crc kubenswrapper[4921]: I1210 13:33:22.352130 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-c8qgz" Dec 10 13:33:22 crc kubenswrapper[4921]: I1210 13:33:22.380756 4921 scope.go:117] "RemoveContainer" containerID="7cc513eb338e5cd98da0ec8edf91d0f69212d8483a7b68b98baaa24c40e52450" Dec 10 13:33:22 crc kubenswrapper[4921]: I1210 13:33:22.411330 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-c8qgz"] Dec 10 13:33:22 crc kubenswrapper[4921]: I1210 13:33:22.415676 4921 scope.go:117] "RemoveContainer" containerID="4a22e4e92e2fa0e2c74b1031a546b202447d56512502386dad041423a4a5cf60" Dec 10 13:33:22 crc kubenswrapper[4921]: I1210 13:33:22.418420 4921 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-c8qgz"] Dec 10 13:33:23 crc kubenswrapper[4921]: I1210 13:33:23.204578 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="915ef55d-c3f8-43f2-a326-f2c7a72ce42c" path="/var/lib/kubelet/pods/915ef55d-c3f8-43f2-a326-f2c7a72ce42c/volumes" Dec 10 13:33:46 crc kubenswrapper[4921]: I1210 13:33:46.711446 4921 patch_prober.go:28] interesting pod/machine-config-daemon-vn2n6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 10 13:33:46 crc kubenswrapper[4921]: I1210 13:33:46.711924 4921 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" podUID="354355f7-6630-49a8-bdc5-5e875feecb7f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 10 13:34:01 crc kubenswrapper[4921]: I1210 13:34:01.736210 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-66zf6/must-gather-ln95p"] Dec 10 13:34:01 crc kubenswrapper[4921]: E1210 13:34:01.738338 4921 cpu_manager.go:410] "RemoveStaleState: removing 
container" podUID="915ef55d-c3f8-43f2-a326-f2c7a72ce42c" containerName="extract-utilities" Dec 10 13:34:01 crc kubenswrapper[4921]: I1210 13:34:01.738577 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="915ef55d-c3f8-43f2-a326-f2c7a72ce42c" containerName="extract-utilities" Dec 10 13:34:01 crc kubenswrapper[4921]: E1210 13:34:01.738677 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="80b6c46b-c866-4183-8e59-6f802fdaaa75" containerName="extract-content" Dec 10 13:34:01 crc kubenswrapper[4921]: I1210 13:34:01.738751 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="80b6c46b-c866-4183-8e59-6f802fdaaa75" containerName="extract-content" Dec 10 13:34:01 crc kubenswrapper[4921]: E1210 13:34:01.738842 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="80b6c46b-c866-4183-8e59-6f802fdaaa75" containerName="registry-server" Dec 10 13:34:01 crc kubenswrapper[4921]: I1210 13:34:01.739103 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="80b6c46b-c866-4183-8e59-6f802fdaaa75" containerName="registry-server" Dec 10 13:34:01 crc kubenswrapper[4921]: E1210 13:34:01.739191 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="80b6c46b-c866-4183-8e59-6f802fdaaa75" containerName="extract-utilities" Dec 10 13:34:01 crc kubenswrapper[4921]: I1210 13:34:01.739262 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="80b6c46b-c866-4183-8e59-6f802fdaaa75" containerName="extract-utilities" Dec 10 13:34:01 crc kubenswrapper[4921]: E1210 13:34:01.739334 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="915ef55d-c3f8-43f2-a326-f2c7a72ce42c" containerName="extract-content" Dec 10 13:34:01 crc kubenswrapper[4921]: I1210 13:34:01.739422 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="915ef55d-c3f8-43f2-a326-f2c7a72ce42c" containerName="extract-content" Dec 10 13:34:01 crc kubenswrapper[4921]: E1210 13:34:01.739503 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="915ef55d-c3f8-43f2-a326-f2c7a72ce42c" containerName="registry-server" Dec 10 13:34:01 crc kubenswrapper[4921]: I1210 13:34:01.739572 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="915ef55d-c3f8-43f2-a326-f2c7a72ce42c" containerName="registry-server" Dec 10 13:34:01 crc kubenswrapper[4921]: I1210 13:34:01.739848 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="80b6c46b-c866-4183-8e59-6f802fdaaa75" containerName="registry-server" Dec 10 13:34:01 crc kubenswrapper[4921]: I1210 13:34:01.739938 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="915ef55d-c3f8-43f2-a326-f2c7a72ce42c" containerName="registry-server" Dec 10 13:34:01 crc kubenswrapper[4921]: I1210 13:34:01.741174 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-66zf6/must-gather-ln95p" Dec 10 13:34:01 crc kubenswrapper[4921]: I1210 13:34:01.743047 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-66zf6"/"openshift-service-ca.crt" Dec 10 13:34:01 crc kubenswrapper[4921]: I1210 13:34:01.743726 4921 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-66zf6"/"default-dockercfg-ghm4g" Dec 10 13:34:01 crc kubenswrapper[4921]: I1210 13:34:01.743865 4921 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-66zf6"/"kube-root-ca.crt" Dec 10 13:34:01 crc kubenswrapper[4921]: I1210 13:34:01.749866 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-66zf6/must-gather-ln95p"] Dec 10 13:34:01 crc kubenswrapper[4921]: I1210 13:34:01.807864 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rpkd5\" (UniqueName: \"kubernetes.io/projected/ebe87d92-7e4e-4963-a2af-320f7b71dec3-kube-api-access-rpkd5\") pod \"must-gather-ln95p\" (UID: \"ebe87d92-7e4e-4963-a2af-320f7b71dec3\") " pod="openshift-must-gather-66zf6/must-gather-ln95p" Dec 10 13:34:01 crc kubenswrapper[4921]: I1210 13:34:01.807939 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/ebe87d92-7e4e-4963-a2af-320f7b71dec3-must-gather-output\") pod \"must-gather-ln95p\" (UID: \"ebe87d92-7e4e-4963-a2af-320f7b71dec3\") " pod="openshift-must-gather-66zf6/must-gather-ln95p" Dec 10 13:34:01 crc kubenswrapper[4921]: I1210 13:34:01.909898 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rpkd5\" (UniqueName: \"kubernetes.io/projected/ebe87d92-7e4e-4963-a2af-320f7b71dec3-kube-api-access-rpkd5\") pod \"must-gather-ln95p\" (UID: \"ebe87d92-7e4e-4963-a2af-320f7b71dec3\") " pod="openshift-must-gather-66zf6/must-gather-ln95p" Dec 10 13:34:01 crc kubenswrapper[4921]: I1210 13:34:01.909966 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/ebe87d92-7e4e-4963-a2af-320f7b71dec3-must-gather-output\") pod \"must-gather-ln95p\" (UID: \"ebe87d92-7e4e-4963-a2af-320f7b71dec3\") " pod="openshift-must-gather-66zf6/must-gather-ln95p" Dec 10 13:34:01 crc kubenswrapper[4921]: I1210 13:34:01.910424 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/ebe87d92-7e4e-4963-a2af-320f7b71dec3-must-gather-output\") pod \"must-gather-ln95p\" (UID: \"ebe87d92-7e4e-4963-a2af-320f7b71dec3\") " pod="openshift-must-gather-66zf6/must-gather-ln95p" Dec 10 13:34:01 crc kubenswrapper[4921]: I1210 13:34:01.927637 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rpkd5\" (UniqueName: \"kubernetes.io/projected/ebe87d92-7e4e-4963-a2af-320f7b71dec3-kube-api-access-rpkd5\") pod \"must-gather-ln95p\" (UID: \"ebe87d92-7e4e-4963-a2af-320f7b71dec3\") " pod="openshift-must-gather-66zf6/must-gather-ln95p" Dec 10 13:34:02 crc kubenswrapper[4921]: I1210 13:34:02.063013 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-66zf6/must-gather-ln95p" Dec 10 13:34:02 crc kubenswrapper[4921]: I1210 13:34:02.546874 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-66zf6/must-gather-ln95p"] Dec 10 13:34:02 crc kubenswrapper[4921]: I1210 13:34:02.687165 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-66zf6/must-gather-ln95p" event={"ID":"ebe87d92-7e4e-4963-a2af-320f7b71dec3","Type":"ContainerStarted","Data":"d4f0f515242b271fa9b7ba3335c374f686ee4155e96a855bcf29088ba0afdd2e"} Dec 10 13:34:16 crc kubenswrapper[4921]: I1210 13:34:16.710769 4921 patch_prober.go:28] interesting pod/machine-config-daemon-vn2n6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 10 13:34:16 crc kubenswrapper[4921]: I1210 13:34:16.711260 4921 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" podUID="354355f7-6630-49a8-bdc5-5e875feecb7f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 10 13:34:16 crc kubenswrapper[4921]: I1210 13:34:16.814143 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-66zf6/must-gather-ln95p" event={"ID":"ebe87d92-7e4e-4963-a2af-320f7b71dec3","Type":"ContainerStarted","Data":"b76388c563016ad17d394b1f34e18912abda547fbe3e735f4d0bac6cfc7ef140"} Dec 10 13:34:16 crc kubenswrapper[4921]: I1210 13:34:16.814597 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-66zf6/must-gather-ln95p" event={"ID":"ebe87d92-7e4e-4963-a2af-320f7b71dec3","Type":"ContainerStarted","Data":"27755b22769497f398cb97dd8aca2ca2f919c7254988d33e37624f705f879b62"} Dec 10 13:34:16 crc kubenswrapper[4921]: I1210 13:34:16.830968 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-66zf6/must-gather-ln95p" podStartSLOduration=2.746136997 podStartE2EDuration="15.830952671s" podCreationTimestamp="2025-12-10 13:34:01 +0000 UTC" firstStartedPulling="2025-12-10 13:34:02.545052292 +0000 UTC m=+2239.761274216" lastFinishedPulling="2025-12-10 13:34:15.629867956 +0000 UTC m=+2252.846089890" observedRunningTime="2025-12-10 13:34:16.829621736 +0000 UTC m=+2254.045843660" watchObservedRunningTime="2025-12-10 13:34:16.830952671 +0000 UTC m=+2254.047174595" Dec 10 13:34:19 crc kubenswrapper[4921]: I1210 13:34:19.724003 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-66zf6/crc-debug-nvc67"] Dec 10 13:34:19 crc kubenswrapper[4921]: I1210 13:34:19.725555 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-66zf6/crc-debug-nvc67" Dec 10 13:34:19 crc kubenswrapper[4921]: I1210 13:34:19.835272 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a35ae80b-9908-4a33-a708-8f04d9d6256c-host\") pod \"crc-debug-nvc67\" (UID: \"a35ae80b-9908-4a33-a708-8f04d9d6256c\") " pod="openshift-must-gather-66zf6/crc-debug-nvc67" Dec 10 13:34:19 crc kubenswrapper[4921]: I1210 13:34:19.835322 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fwvv2\" (UniqueName: \"kubernetes.io/projected/a35ae80b-9908-4a33-a708-8f04d9d6256c-kube-api-access-fwvv2\") pod \"crc-debug-nvc67\" (UID: \"a35ae80b-9908-4a33-a708-8f04d9d6256c\") " pod="openshift-must-gather-66zf6/crc-debug-nvc67" Dec 10 13:34:19 crc kubenswrapper[4921]: I1210 13:34:19.936762 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a35ae80b-9908-4a33-a708-8f04d9d6256c-host\") pod \"crc-debug-nvc67\" (UID: \"a35ae80b-9908-4a33-a708-8f04d9d6256c\") " pod="openshift-must-gather-66zf6/crc-debug-nvc67" Dec 10 13:34:19 crc kubenswrapper[4921]: I1210 13:34:19.936820 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fwvv2\" (UniqueName: \"kubernetes.io/projected/a35ae80b-9908-4a33-a708-8f04d9d6256c-kube-api-access-fwvv2\") pod \"crc-debug-nvc67\" (UID: \"a35ae80b-9908-4a33-a708-8f04d9d6256c\") " pod="openshift-must-gather-66zf6/crc-debug-nvc67" Dec 10 13:34:19 crc kubenswrapper[4921]: I1210 13:34:19.937103 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a35ae80b-9908-4a33-a708-8f04d9d6256c-host\") pod \"crc-debug-nvc67\" (UID: \"a35ae80b-9908-4a33-a708-8f04d9d6256c\") " pod="openshift-must-gather-66zf6/crc-debug-nvc67" Dec 10 13:34:19 crc kubenswrapper[4921]: I1210 13:34:19.974145 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fwvv2\" (UniqueName: \"kubernetes.io/projected/a35ae80b-9908-4a33-a708-8f04d9d6256c-kube-api-access-fwvv2\") pod \"crc-debug-nvc67\" (UID: \"a35ae80b-9908-4a33-a708-8f04d9d6256c\") " pod="openshift-must-gather-66zf6/crc-debug-nvc67" Dec 10 13:34:20 crc kubenswrapper[4921]: I1210 13:34:20.045481 4921 util.go:30] "No sandbox for pod can be found. 
Dec 10 13:34:20 crc kubenswrapper[4921]: I1210 13:34:20.045481 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-66zf6/crc-debug-nvc67"
Dec 10 13:34:20 crc kubenswrapper[4921]: I1210 13:34:20.846358 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-66zf6/crc-debug-nvc67" event={"ID":"a35ae80b-9908-4a33-a708-8f04d9d6256c","Type":"ContainerStarted","Data":"9aa22713fc9371110e4b46fb24b5faa069e8dd27cc1f937ac32f5eee6bbe594c"}
Dec 10 13:34:34 crc kubenswrapper[4921]: I1210 13:34:34.964165 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-66zf6/crc-debug-nvc67" event={"ID":"a35ae80b-9908-4a33-a708-8f04d9d6256c","Type":"ContainerStarted","Data":"c2d55db6f56694d5ff5b6ce625d0b6e764d2d56ea2e6acd6fbd7aa23d80588b0"}
Dec 10 13:34:34 crc kubenswrapper[4921]: I1210 13:34:34.989757 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-66zf6/crc-debug-nvc67" podStartSLOduration=1.4916999 podStartE2EDuration="15.989741415s" podCreationTimestamp="2025-12-10 13:34:19 +0000 UTC" firstStartedPulling="2025-12-10 13:34:20.078927388 +0000 UTC m=+2257.295149312" lastFinishedPulling="2025-12-10 13:34:34.576968903 +0000 UTC m=+2271.793190827" observedRunningTime="2025-12-10 13:34:34.978142644 +0000 UTC m=+2272.194364578" watchObservedRunningTime="2025-12-10 13:34:34.989741415 +0000 UTC m=+2272.205963339"
Dec 10 13:34:46 crc kubenswrapper[4921]: I1210 13:34:46.711360 4921 patch_prober.go:28] interesting pod/machine-config-daemon-vn2n6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 10 13:34:46 crc kubenswrapper[4921]: I1210 13:34:46.711920 4921 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" podUID="354355f7-6630-49a8-bdc5-5e875feecb7f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 10 13:34:46 crc kubenswrapper[4921]: I1210 13:34:46.711954 4921 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6"
Dec 10 13:34:46 crc kubenswrapper[4921]: I1210 13:34:46.712598 4921 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"6faab8bcc61171add5c6212849f3f417990397c571c057a5639e195842f4d47e"} pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 10 13:34:46 crc kubenswrapper[4921]: I1210 13:34:46.712640 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" podUID="354355f7-6630-49a8-bdc5-5e875feecb7f" containerName="machine-config-daemon" containerID="cri-o://6faab8bcc61171add5c6212849f3f417990397c571c057a5639e195842f4d47e" gracePeriod=600
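The "back-off 5m0s" messages that repeat below come from kubelet's container restart backoff, which per upstream defaults starts at 10s, doubles on each crash, and is capped at 5m; that cap is why the logged delay stays pinned at 5m0s once machine-config-daemon keeps failing its liveness probe. A toy version of that policy (my own sketch, not kubelet's code):

package main

import (
	"fmt"
	"time"
)

// crashLoopDelay returns the restart delay after the given number of
// restarts: 10s initially, doubled each time, capped at 5m.
func crashLoopDelay(restarts int) time.Duration {
	const (
		initial = 10 * time.Second
		max     = 5 * time.Minute
	)
	d := initial
	for i := 0; i < restarts; i++ {
		d *= 2
		if d >= max {
			return max
		}
	}
	return d
}

func main() {
	for r := 0; r <= 6; r++ {
		fmt.Printf("restart %d -> back-off %v\n", r, crashLoopDelay(r))
	}
	// Restart 5 and beyond print "back-off 5m0s", matching the log.
}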
Dec 10 13:34:47 crc kubenswrapper[4921]: E1210 13:34:47.338158 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-vn2n6_openshift-machine-config-operator(354355f7-6630-49a8-bdc5-5e875feecb7f)\"" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" podUID="354355f7-6630-49a8-bdc5-5e875feecb7f"
Dec 10 13:34:47 crc kubenswrapper[4921]: E1210 13:34:47.457035 4921 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod354355f7_6630_49a8_bdc5_5e875feecb7f.slice/crio-conmon-6faab8bcc61171add5c6212849f3f417990397c571c057a5639e195842f4d47e.scope\": RecentStats: unable to find data in memory cache]"
Dec 10 13:34:48 crc kubenswrapper[4921]: I1210 13:34:48.067739 4921 generic.go:334] "Generic (PLEG): container finished" podID="354355f7-6630-49a8-bdc5-5e875feecb7f" containerID="6faab8bcc61171add5c6212849f3f417990397c571c057a5639e195842f4d47e" exitCode=0
Dec 10 13:34:48 crc kubenswrapper[4921]: I1210 13:34:48.067818 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" event={"ID":"354355f7-6630-49a8-bdc5-5e875feecb7f","Type":"ContainerDied","Data":"6faab8bcc61171add5c6212849f3f417990397c571c057a5639e195842f4d47e"}
Dec 10 13:34:48 crc kubenswrapper[4921]: I1210 13:34:48.068124 4921 scope.go:117] "RemoveContainer" containerID="c43fd6b298db60ab5b71d2d16349d66c9cda54d132fbb8a931cadf7647f498e4"
Dec 10 13:34:48 crc kubenswrapper[4921]: I1210 13:34:48.068720 4921 scope.go:117] "RemoveContainer" containerID="6faab8bcc61171add5c6212849f3f417990397c571c057a5639e195842f4d47e"
Dec 10 13:34:48 crc kubenswrapper[4921]: E1210 13:34:48.069015 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-vn2n6_openshift-machine-config-operator(354355f7-6630-49a8-bdc5-5e875feecb7f)\"" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" podUID="354355f7-6630-49a8-bdc5-5e875feecb7f"
Dec 10 13:34:57 crc kubenswrapper[4921]: I1210 13:34:57.151533 4921 generic.go:334] "Generic (PLEG): container finished" podID="a35ae80b-9908-4a33-a708-8f04d9d6256c" containerID="c2d55db6f56694d5ff5b6ce625d0b6e764d2d56ea2e6acd6fbd7aa23d80588b0" exitCode=0
Dec 10 13:34:57 crc kubenswrapper[4921]: I1210 13:34:57.151592 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-66zf6/crc-debug-nvc67" event={"ID":"a35ae80b-9908-4a33-a708-8f04d9d6256c","Type":"ContainerDied","Data":"c2d55db6f56694d5ff5b6ce625d0b6e764d2d56ea2e6acd6fbd7aa23d80588b0"}
Dec 10 13:34:58 crc kubenswrapper[4921]: I1210 13:34:58.269798 4921 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-66zf6/crc-debug-nvc67" Dec 10 13:34:58 crc kubenswrapper[4921]: I1210 13:34:58.302893 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-66zf6/crc-debug-nvc67"] Dec 10 13:34:58 crc kubenswrapper[4921]: I1210 13:34:58.313057 4921 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-66zf6/crc-debug-nvc67"] Dec 10 13:34:58 crc kubenswrapper[4921]: I1210 13:34:58.439347 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a35ae80b-9908-4a33-a708-8f04d9d6256c-host\") pod \"a35ae80b-9908-4a33-a708-8f04d9d6256c\" (UID: \"a35ae80b-9908-4a33-a708-8f04d9d6256c\") " Dec 10 13:34:58 crc kubenswrapper[4921]: I1210 13:34:58.439662 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fwvv2\" (UniqueName: \"kubernetes.io/projected/a35ae80b-9908-4a33-a708-8f04d9d6256c-kube-api-access-fwvv2\") pod \"a35ae80b-9908-4a33-a708-8f04d9d6256c\" (UID: \"a35ae80b-9908-4a33-a708-8f04d9d6256c\") " Dec 10 13:34:58 crc kubenswrapper[4921]: I1210 13:34:58.440234 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a35ae80b-9908-4a33-a708-8f04d9d6256c-host" (OuterVolumeSpecName: "host") pod "a35ae80b-9908-4a33-a708-8f04d9d6256c" (UID: "a35ae80b-9908-4a33-a708-8f04d9d6256c"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 13:34:58 crc kubenswrapper[4921]: I1210 13:34:58.444961 4921 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a35ae80b-9908-4a33-a708-8f04d9d6256c-host\") on node \"crc\" DevicePath \"\"" Dec 10 13:34:58 crc kubenswrapper[4921]: I1210 13:34:58.490311 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a35ae80b-9908-4a33-a708-8f04d9d6256c-kube-api-access-fwvv2" (OuterVolumeSpecName: "kube-api-access-fwvv2") pod "a35ae80b-9908-4a33-a708-8f04d9d6256c" (UID: "a35ae80b-9908-4a33-a708-8f04d9d6256c"). InnerVolumeSpecName "kube-api-access-fwvv2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 13:34:58 crc kubenswrapper[4921]: I1210 13:34:58.546822 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fwvv2\" (UniqueName: \"kubernetes.io/projected/a35ae80b-9908-4a33-a708-8f04d9d6256c-kube-api-access-fwvv2\") on node \"crc\" DevicePath \"\"" Dec 10 13:34:59 crc kubenswrapper[4921]: I1210 13:34:59.167237 4921 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9aa22713fc9371110e4b46fb24b5faa069e8dd27cc1f937ac32f5eee6bbe594c" Dec 10 13:34:59 crc kubenswrapper[4921]: I1210 13:34:59.167531 4921 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-66zf6/crc-debug-nvc67" Dec 10 13:34:59 crc kubenswrapper[4921]: I1210 13:34:59.203035 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a35ae80b-9908-4a33-a708-8f04d9d6256c" path="/var/lib/kubelet/pods/a35ae80b-9908-4a33-a708-8f04d9d6256c/volumes" Dec 10 13:34:59 crc kubenswrapper[4921]: I1210 13:34:59.510566 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-66zf6/crc-debug-qfx2w"] Dec 10 13:34:59 crc kubenswrapper[4921]: E1210 13:34:59.511857 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a35ae80b-9908-4a33-a708-8f04d9d6256c" containerName="container-00" Dec 10 13:34:59 crc kubenswrapper[4921]: I1210 13:34:59.511931 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="a35ae80b-9908-4a33-a708-8f04d9d6256c" containerName="container-00" Dec 10 13:34:59 crc kubenswrapper[4921]: I1210 13:34:59.512169 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="a35ae80b-9908-4a33-a708-8f04d9d6256c" containerName="container-00" Dec 10 13:34:59 crc kubenswrapper[4921]: I1210 13:34:59.512779 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-66zf6/crc-debug-qfx2w" Dec 10 13:34:59 crc kubenswrapper[4921]: I1210 13:34:59.560030 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f7cdd\" (UniqueName: \"kubernetes.io/projected/b05cd5a6-93ea-42d2-9c9f-2774e64dacf2-kube-api-access-f7cdd\") pod \"crc-debug-qfx2w\" (UID: \"b05cd5a6-93ea-42d2-9c9f-2774e64dacf2\") " pod="openshift-must-gather-66zf6/crc-debug-qfx2w" Dec 10 13:34:59 crc kubenswrapper[4921]: I1210 13:34:59.560073 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b05cd5a6-93ea-42d2-9c9f-2774e64dacf2-host\") pod \"crc-debug-qfx2w\" (UID: \"b05cd5a6-93ea-42d2-9c9f-2774e64dacf2\") " pod="openshift-must-gather-66zf6/crc-debug-qfx2w" Dec 10 13:34:59 crc kubenswrapper[4921]: I1210 13:34:59.661752 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f7cdd\" (UniqueName: \"kubernetes.io/projected/b05cd5a6-93ea-42d2-9c9f-2774e64dacf2-kube-api-access-f7cdd\") pod \"crc-debug-qfx2w\" (UID: \"b05cd5a6-93ea-42d2-9c9f-2774e64dacf2\") " pod="openshift-must-gather-66zf6/crc-debug-qfx2w" Dec 10 13:34:59 crc kubenswrapper[4921]: I1210 13:34:59.661798 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b05cd5a6-93ea-42d2-9c9f-2774e64dacf2-host\") pod \"crc-debug-qfx2w\" (UID: \"b05cd5a6-93ea-42d2-9c9f-2774e64dacf2\") " pod="openshift-must-gather-66zf6/crc-debug-qfx2w" Dec 10 13:34:59 crc kubenswrapper[4921]: I1210 13:34:59.661910 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b05cd5a6-93ea-42d2-9c9f-2774e64dacf2-host\") pod \"crc-debug-qfx2w\" (UID: \"b05cd5a6-93ea-42d2-9c9f-2774e64dacf2\") " pod="openshift-must-gather-66zf6/crc-debug-qfx2w" Dec 10 13:34:59 crc kubenswrapper[4921]: I1210 13:34:59.692696 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f7cdd\" (UniqueName: \"kubernetes.io/projected/b05cd5a6-93ea-42d2-9c9f-2774e64dacf2-kube-api-access-f7cdd\") pod \"crc-debug-qfx2w\" (UID: \"b05cd5a6-93ea-42d2-9c9f-2774e64dacf2\") " 
pod="openshift-must-gather-66zf6/crc-debug-qfx2w" Dec 10 13:34:59 crc kubenswrapper[4921]: I1210 13:34:59.833945 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-66zf6/crc-debug-qfx2w" Dec 10 13:35:00 crc kubenswrapper[4921]: I1210 13:35:00.179909 4921 generic.go:334] "Generic (PLEG): container finished" podID="b05cd5a6-93ea-42d2-9c9f-2774e64dacf2" containerID="2bbf45beace27c281aa407a0cc85a5f3d613a9079d0bc481a133e35ab74c5e36" exitCode=1 Dec 10 13:35:00 crc kubenswrapper[4921]: I1210 13:35:00.180001 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-66zf6/crc-debug-qfx2w" event={"ID":"b05cd5a6-93ea-42d2-9c9f-2774e64dacf2","Type":"ContainerDied","Data":"2bbf45beace27c281aa407a0cc85a5f3d613a9079d0bc481a133e35ab74c5e36"} Dec 10 13:35:00 crc kubenswrapper[4921]: I1210 13:35:00.180262 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-66zf6/crc-debug-qfx2w" event={"ID":"b05cd5a6-93ea-42d2-9c9f-2774e64dacf2","Type":"ContainerStarted","Data":"050e79182f7d83a70ceeb31840fb7bf43193927e589a5115bb4826bdc6436966"} Dec 10 13:35:00 crc kubenswrapper[4921]: I1210 13:35:00.215436 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-66zf6/crc-debug-qfx2w"] Dec 10 13:35:00 crc kubenswrapper[4921]: I1210 13:35:00.222324 4921 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-66zf6/crc-debug-qfx2w"] Dec 10 13:35:01 crc kubenswrapper[4921]: I1210 13:35:01.279480 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-66zf6/crc-debug-qfx2w" Dec 10 13:35:01 crc kubenswrapper[4921]: I1210 13:35:01.396775 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b05cd5a6-93ea-42d2-9c9f-2774e64dacf2-host\") pod \"b05cd5a6-93ea-42d2-9c9f-2774e64dacf2\" (UID: \"b05cd5a6-93ea-42d2-9c9f-2774e64dacf2\") " Dec 10 13:35:01 crc kubenswrapper[4921]: I1210 13:35:01.396854 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f7cdd\" (UniqueName: \"kubernetes.io/projected/b05cd5a6-93ea-42d2-9c9f-2774e64dacf2-kube-api-access-f7cdd\") pod \"b05cd5a6-93ea-42d2-9c9f-2774e64dacf2\" (UID: \"b05cd5a6-93ea-42d2-9c9f-2774e64dacf2\") " Dec 10 13:35:01 crc kubenswrapper[4921]: I1210 13:35:01.397130 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b05cd5a6-93ea-42d2-9c9f-2774e64dacf2-host" (OuterVolumeSpecName: "host") pod "b05cd5a6-93ea-42d2-9c9f-2774e64dacf2" (UID: "b05cd5a6-93ea-42d2-9c9f-2774e64dacf2"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 13:35:01 crc kubenswrapper[4921]: I1210 13:35:01.397426 4921 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b05cd5a6-93ea-42d2-9c9f-2774e64dacf2-host\") on node \"crc\" DevicePath \"\"" Dec 10 13:35:01 crc kubenswrapper[4921]: I1210 13:35:01.408119 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b05cd5a6-93ea-42d2-9c9f-2774e64dacf2-kube-api-access-f7cdd" (OuterVolumeSpecName: "kube-api-access-f7cdd") pod "b05cd5a6-93ea-42d2-9c9f-2774e64dacf2" (UID: "b05cd5a6-93ea-42d2-9c9f-2774e64dacf2"). InnerVolumeSpecName "kube-api-access-f7cdd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 13:35:01 crc kubenswrapper[4921]: I1210 13:35:01.498846 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f7cdd\" (UniqueName: \"kubernetes.io/projected/b05cd5a6-93ea-42d2-9c9f-2774e64dacf2-kube-api-access-f7cdd\") on node \"crc\" DevicePath \"\"" Dec 10 13:35:02 crc kubenswrapper[4921]: I1210 13:35:02.193110 4921 scope.go:117] "RemoveContainer" containerID="6faab8bcc61171add5c6212849f3f417990397c571c057a5639e195842f4d47e" Dec 10 13:35:02 crc kubenswrapper[4921]: E1210 13:35:02.193730 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-vn2n6_openshift-machine-config-operator(354355f7-6630-49a8-bdc5-5e875feecb7f)\"" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" podUID="354355f7-6630-49a8-bdc5-5e875feecb7f" Dec 10 13:35:02 crc kubenswrapper[4921]: I1210 13:35:02.197508 4921 scope.go:117] "RemoveContainer" containerID="2bbf45beace27c281aa407a0cc85a5f3d613a9079d0bc481a133e35ab74c5e36" Dec 10 13:35:02 crc kubenswrapper[4921]: I1210 13:35:02.197535 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-66zf6/crc-debug-qfx2w" Dec 10 13:35:03 crc kubenswrapper[4921]: I1210 13:35:03.224491 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b05cd5a6-93ea-42d2-9c9f-2774e64dacf2" path="/var/lib/kubelet/pods/b05cd5a6-93ea-42d2-9c9f-2774e64dacf2/volumes" Dec 10 13:35:15 crc kubenswrapper[4921]: I1210 13:35:15.194055 4921 scope.go:117] "RemoveContainer" containerID="6faab8bcc61171add5c6212849f3f417990397c571c057a5639e195842f4d47e" Dec 10 13:35:15 crc kubenswrapper[4921]: E1210 13:35:15.194907 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-vn2n6_openshift-machine-config-operator(354355f7-6630-49a8-bdc5-5e875feecb7f)\"" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" podUID="354355f7-6630-49a8-bdc5-5e875feecb7f" Dec 10 13:35:30 crc kubenswrapper[4921]: I1210 13:35:30.192347 4921 scope.go:117] "RemoveContainer" containerID="6faab8bcc61171add5c6212849f3f417990397c571c057a5639e195842f4d47e" Dec 10 13:35:30 crc kubenswrapper[4921]: E1210 13:35:30.193317 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-vn2n6_openshift-machine-config-operator(354355f7-6630-49a8-bdc5-5e875feecb7f)\"" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" podUID="354355f7-6630-49a8-bdc5-5e875feecb7f" Dec 10 13:35:38 crc kubenswrapper[4921]: I1210 13:35:38.060325 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-5659c44dfd-j5lr5_af6a0c63-c4e3-4b87-9800-60cc17fbb13d/barbican-api/0.log" Dec 10 13:35:38 crc kubenswrapper[4921]: I1210 13:35:38.335580 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-5fdffd7d58-bx6fh_6bad2725-64db-4468-b2c2-a1454f632ed9/barbican-keystone-listener/0.log" Dec 10 13:35:38 crc kubenswrapper[4921]: I1210 13:35:38.352170 4921 log.go:25] "Finished parsing log 
file" path="/var/log/pods/openstack_barbican-api-5659c44dfd-j5lr5_af6a0c63-c4e3-4b87-9800-60cc17fbb13d/barbican-api-log/0.log"
Dec 10 13:35:38 crc kubenswrapper[4921]: I1210 13:35:38.396503 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-5fdffd7d58-bx6fh_6bad2725-64db-4468-b2c2-a1454f632ed9/barbican-keystone-listener-log/0.log"
Dec 10 13:35:38 crc kubenswrapper[4921]: I1210 13:35:38.685272 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-5db748cdcf-qnswn_1e30b467-3355-49fe-862d-9a79cdf0a35b/barbican-worker-log/0.log"
Dec 10 13:35:38 crc kubenswrapper[4921]: I1210 13:35:38.695356 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-5db748cdcf-qnswn_1e30b467-3355-49fe-862d-9a79cdf0a35b/barbican-worker/0.log"
Dec 10 13:35:38 crc kubenswrapper[4921]: I1210 13:35:38.905151 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-g4sdz_c9c17643-da1b-4b05-acc8-08b43f910a68/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log"
Dec 10 13:35:39 crc kubenswrapper[4921]: I1210 13:35:39.005974 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_ded7f5ee-1c28-48c4-bd62-5fa8f40608ef/ceilometer-notification-agent/0.log"
Dec 10 13:35:39 crc kubenswrapper[4921]: I1210 13:35:39.037828 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_ded7f5ee-1c28-48c4-bd62-5fa8f40608ef/ceilometer-central-agent/0.log"
Dec 10 13:35:39 crc kubenswrapper[4921]: I1210 13:35:39.189163 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_ded7f5ee-1c28-48c4-bd62-5fa8f40608ef/proxy-httpd/0.log"
Dec 10 13:35:39 crc kubenswrapper[4921]: I1210 13:35:39.235811 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_ded7f5ee-1c28-48c4-bd62-5fa8f40608ef/sg-core/0.log"
Dec 10 13:35:39 crc kubenswrapper[4921]: I1210 13:35:39.312818 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-rl5q9_1b920f02-bacb-47ab-b4d3-8650f287bd0a/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam/0.log"
Dec 10 13:35:39 crc kubenswrapper[4921]: I1210 13:35:39.468507 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_bff237c9-cb6c-4dc1-bdd0-499a5c7f33d2/cinder-api/0.log"
Dec 10 13:35:39 crc kubenswrapper[4921]: I1210 13:35:39.557234 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_bff237c9-cb6c-4dc1-bdd0-499a5c7f33d2/cinder-api-log/0.log"
Dec 10 13:35:39 crc kubenswrapper[4921]: I1210 13:35:39.677448 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_6c9638f4-5794-40eb-8303-6301fabc3fb3/probe/0.log"
Dec 10 13:35:39 crc kubenswrapper[4921]: I1210 13:35:39.741945 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_6c9638f4-5794-40eb-8303-6301fabc3fb3/cinder-scheduler/0.log"
Dec 10 13:35:39 crc kubenswrapper[4921]: I1210 13:35:39.836494 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-627br_6d9e8b54-936c-4a54-aee9-2e4b0c6d8ed7/configure-network-edpm-deployment-openstack-edpm-ipam/0.log"
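This sweep of "Finished parsing log file" entries reads /var/log/pods, whose directory layout encodes namespace, pod name, pod UID, container name, and restart count as <namespace>_<pod-name>_<pod-uid>/<container>/<restart-count>.log; the paths in the log all follow that shape. A small stand-alone parser for the layout (the parsing code and type are my own illustration):

package main

import (
	"fmt"
	"strings"
)

type podLog struct {
	namespace, pod, uid, container, attempt string
}

func parsePodLogPath(p string) (podLog, error) {
	rel := strings.TrimPrefix(p, "/var/log/pods/")
	parts := strings.Split(rel, "/") // <ns>_<pod>_<uid> / <container> / <n>.log
	if len(parts) != 3 {
		return podLog{}, fmt.Errorf("unexpected layout: %s", p)
	}
	meta := strings.SplitN(parts[0], "_", 3) // ns and pod names cannot contain "_"
	if len(meta) != 3 {
		return podLog{}, fmt.Errorf("unexpected pod dir: %s", parts[0])
	}
	return podLog{
		namespace: meta[0],
		pod:       meta[1],
		uid:       meta[2],
		container: parts[1],
		attempt:   strings.TrimSuffix(parts[2], ".log"),
	}, nil
}

func main() {
	// Path taken verbatim from one of the entries above.
	l, err := parsePodLogPath("/var/log/pods/openstack_ceilometer-0_ded7f5ee-1c28-48c4-bd62-5fa8f40608ef/sg-core/0.log")
	if err != nil {
		panic(err)
	}
	fmt.Printf("%+v\n", l)
}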
Dec 10 13:35:39 crc kubenswrapper[4921]: I1210 13:35:39.974337 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-sw4fz_3f964510-c89d-47da-ae1f-0f29cbde809f/configure-os-edpm-deployment-openstack-edpm-ipam/0.log"
Dec 10 13:35:40 crc kubenswrapper[4921]: I1210 13:35:40.129370 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-667ff9c869-rtdkp_16952130-d663-4f2f-bb74-724fac2ec6dd/init/0.log"
Dec 10 13:35:40 crc kubenswrapper[4921]: I1210 13:35:40.281988 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-667ff9c869-rtdkp_16952130-d663-4f2f-bb74-724fac2ec6dd/init/0.log"
Dec 10 13:35:40 crc kubenswrapper[4921]: I1210 13:35:40.372800 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-667ff9c869-rtdkp_16952130-d663-4f2f-bb74-724fac2ec6dd/dnsmasq-dns/0.log"
Dec 10 13:35:40 crc kubenswrapper[4921]: I1210 13:35:40.671979 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-szqlc_6d9b88b4-c132-455f-94e1-742726a6bdf8/install-os-edpm-deployment-openstack-edpm-ipam/0.log"
Dec 10 13:35:40 crc kubenswrapper[4921]: I1210 13:35:40.826975 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-565cd4b5db-xfcb6_2d4b5545-ad15-4ed1-b655-61fa51deb3d7/keystone-api/0.log"
Dec 10 13:35:40 crc kubenswrapper[4921]: I1210 13:35:40.985907 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_d263bef7-339e-4d10-b012-a81641e98b7d/kube-state-metrics/0.log"
Dec 10 13:35:41 crc kubenswrapper[4921]: I1210 13:35:41.302329 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-766458cb5f-2g6ln_702f0832-8949-4724-b03d-2a97e46e421e/neutron-api/0.log"
Dec 10 13:35:41 crc kubenswrapper[4921]: I1210 13:35:41.440958 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-766458cb5f-2g6ln_702f0832-8949-4724-b03d-2a97e46e421e/neutron-httpd/0.log"
Dec 10 13:35:41 crc kubenswrapper[4921]: I1210 13:35:41.862145 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_cf045e82-19ac-4799-a0aa-6ca5cb6cee04/nova-api-api/0.log"
Dec 10 13:35:41 crc kubenswrapper[4921]: I1210 13:35:41.912247 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_cf045e82-19ac-4799-a0aa-6ca5cb6cee04/nova-api-log/0.log"
Dec 10 13:35:42 crc kubenswrapper[4921]: I1210 13:35:42.194381 4921 scope.go:117] "RemoveContainer" containerID="6faab8bcc61171add5c6212849f3f417990397c571c057a5639e195842f4d47e"
Dec 10 13:35:42 crc kubenswrapper[4921]: E1210 13:35:42.194586 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-vn2n6_openshift-machine-config-operator(354355f7-6630-49a8-bdc5-5e875feecb7f)\"" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" podUID="354355f7-6630-49a8-bdc5-5e875feecb7f"
Dec 10 13:35:42 crc kubenswrapper[4921]: I1210 13:35:42.229272 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_c3e41b45-0540-41a8-9b47-4b86ffd26dc1/nova-cell0-conductor-conductor/0.log"
Dec 10 13:35:42 crc kubenswrapper[4921]: I1210 13:35:42.332402 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_31ff4ab0-02dc-48d0-8e1b-d3fa83b903e6/nova-cell1-conductor-conductor/0.log" Dec 10
13:35:42 crc kubenswrapper[4921]: I1210 13:35:42.531166 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_81d397da-3f88-490a-8125-01ddc0b3a196/nova-cell1-novncproxy-novncproxy/0.log" Dec 10 13:35:42 crc kubenswrapper[4921]: I1210 13:35:42.597457 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_5baffea2-ac7e-4b10-adfa-1e418bbb1d05/nova-metadata-log/0.log" Dec 10 13:35:42 crc kubenswrapper[4921]: I1210 13:35:42.937968 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_eb69726b-f75d-4d33-8c26-355f8a6dc2ce/nova-scheduler-scheduler/0.log" Dec 10 13:35:43 crc kubenswrapper[4921]: I1210 13:35:43.071297 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_18df86ca-4da7-4979-8a7d-1db5b93c42f4/mysql-bootstrap/0.log" Dec 10 13:35:43 crc kubenswrapper[4921]: I1210 13:35:43.195506 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_5baffea2-ac7e-4b10-adfa-1e418bbb1d05/nova-metadata-metadata/0.log" Dec 10 13:35:43 crc kubenswrapper[4921]: I1210 13:35:43.348798 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_18df86ca-4da7-4979-8a7d-1db5b93c42f4/galera/0.log" Dec 10 13:35:43 crc kubenswrapper[4921]: I1210 13:35:43.441528 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_18df86ca-4da7-4979-8a7d-1db5b93c42f4/mysql-bootstrap/0.log" Dec 10 13:35:43 crc kubenswrapper[4921]: I1210 13:35:43.556341 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_65febf2e-fbb3-42a6-96e0-b7933c0911dd/mysql-bootstrap/0.log" Dec 10 13:35:43 crc kubenswrapper[4921]: I1210 13:35:43.799027 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_65febf2e-fbb3-42a6-96e0-b7933c0911dd/galera/0.log" Dec 10 13:35:43 crc kubenswrapper[4921]: I1210 13:35:43.813884 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_65febf2e-fbb3-42a6-96e0-b7933c0911dd/mysql-bootstrap/0.log" Dec 10 13:35:43 crc kubenswrapper[4921]: I1210 13:35:43.869784 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_5c6a92bb-c309-4db7-89c9-4f6fb2a8c069/openstackclient/0.log" Dec 10 13:35:44 crc kubenswrapper[4921]: I1210 13:35:44.483927 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-75zw8_be165009-1ecf-4849-8cff-e83071094e81/ovn-controller/0.log" Dec 10 13:35:44 crc kubenswrapper[4921]: I1210 13:35:44.522478 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-f9dft_039043c2-b38e-4a56-a3ce-45c9462ed0f1/openstack-network-exporter/0.log" Dec 10 13:35:44 crc kubenswrapper[4921]: I1210 13:35:44.731696 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-zv79d_8444cda9-1de8-49a1-81b3-ebf79bae9995/ovsdb-server-init/0.log" Dec 10 13:35:45 crc kubenswrapper[4921]: I1210 13:35:45.014854 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-zv79d_8444cda9-1de8-49a1-81b3-ebf79bae9995/ovs-vswitchd/0.log" Dec 10 13:35:45 crc kubenswrapper[4921]: I1210 13:35:45.085570 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-zv79d_8444cda9-1de8-49a1-81b3-ebf79bae9995/ovsdb-server-init/0.log" Dec 10 13:35:45 crc 
kubenswrapper[4921]: I1210 13:35:45.141097 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-zv79d_8444cda9-1de8-49a1-81b3-ebf79bae9995/ovsdb-server/0.log" Dec 10 13:35:45 crc kubenswrapper[4921]: I1210 13:35:45.342220 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_bed38314-496b-47e1-bd8b-32cc8f05678e/openstack-network-exporter/0.log" Dec 10 13:35:45 crc kubenswrapper[4921]: I1210 13:35:45.433722 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_bed38314-496b-47e1-bd8b-32cc8f05678e/ovn-northd/0.log" Dec 10 13:35:45 crc kubenswrapper[4921]: I1210 13:35:45.496498 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_f5f51319-9413-4553-b21a-6ac18a452855/openstack-network-exporter/0.log" Dec 10 13:35:45 crc kubenswrapper[4921]: I1210 13:35:45.636288 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_f5f51319-9413-4553-b21a-6ac18a452855/ovsdbserver-nb/0.log" Dec 10 13:35:45 crc kubenswrapper[4921]: I1210 13:35:45.827114 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_49be5ede-571d-40af-b73e-b2f6678df767/ovsdbserver-sb/0.log" Dec 10 13:35:45 crc kubenswrapper[4921]: I1210 13:35:45.875545 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_49be5ede-571d-40af-b73e-b2f6678df767/openstack-network-exporter/0.log" Dec 10 13:35:46 crc kubenswrapper[4921]: I1210 13:35:46.107750 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-5f895c947d-lxwdl_f8ebbb77-6cb6-44f7-86cd-42bf505379ae/placement-api/0.log" Dec 10 13:35:46 crc kubenswrapper[4921]: I1210 13:35:46.223154 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-5f895c947d-lxwdl_f8ebbb77-6cb6-44f7-86cd-42bf505379ae/placement-log/0.log" Dec 10 13:35:46 crc kubenswrapper[4921]: I1210 13:35:46.292157 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_695d1b33-f9c1-44bb-b41d-9f6de71c3527/setup-container/0.log" Dec 10 13:35:46 crc kubenswrapper[4921]: I1210 13:35:46.532016 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_695d1b33-f9c1-44bb-b41d-9f6de71c3527/rabbitmq/0.log" Dec 10 13:35:46 crc kubenswrapper[4921]: I1210 13:35:46.598669 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_695d1b33-f9c1-44bb-b41d-9f6de71c3527/setup-container/0.log" Dec 10 13:35:46 crc kubenswrapper[4921]: I1210 13:35:46.599305 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_763f1047-618c-4167-9d5d-27d387e8adf4/setup-container/0.log" Dec 10 13:35:46 crc kubenswrapper[4921]: I1210 13:35:46.938355 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_763f1047-618c-4167-9d5d-27d387e8adf4/rabbitmq/0.log" Dec 10 13:35:46 crc kubenswrapper[4921]: I1210 13:35:46.950882 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_763f1047-618c-4167-9d5d-27d387e8adf4/setup-container/0.log" Dec 10 13:35:47 crc kubenswrapper[4921]: I1210 13:35:47.031930 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-srqdt_253caf07-9604-44ed-951b-a2546b836755/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 10 13:35:47 crc 
kubenswrapper[4921]: I1210 13:35:47.275550 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-wr5hl_5bb2996f-5e63-42f1-a2a7-757a7fbcff35/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log" Dec 10 13:35:47 crc kubenswrapper[4921]: I1210 13:35:47.341656 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-fnlg7_215dabd9-d5f8-4c14-9ea0-cb5a516c25e4/run-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 10 13:35:47 crc kubenswrapper[4921]: I1210 13:35:47.348273 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_c0fbcc23-a641-4787-9edc-7bf8e8e46a79/memcached/0.log" Dec 10 13:35:47 crc kubenswrapper[4921]: I1210 13:35:47.472928 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-hcvmh_0cbeca26-1132-4f10-b871-6c520af4ad1b/ssh-known-hosts-edpm-deployment/0.log" Dec 10 13:35:47 crc kubenswrapper[4921]: I1210 13:35:47.775096 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-whdnn_05f95f16-6d6b-4820-bdc1-956651c51519/validate-network-edpm-deployment-openstack-edpm-ipam/0.log" Dec 10 13:35:56 crc kubenswrapper[4921]: I1210 13:35:56.193851 4921 scope.go:117] "RemoveContainer" containerID="6faab8bcc61171add5c6212849f3f417990397c571c057a5639e195842f4d47e" Dec 10 13:35:56 crc kubenswrapper[4921]: E1210 13:35:56.194873 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-vn2n6_openshift-machine-config-operator(354355f7-6630-49a8-bdc5-5e875feecb7f)\"" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" podUID="354355f7-6630-49a8-bdc5-5e875feecb7f" Dec 10 13:36:09 crc kubenswrapper[4921]: I1210 13:36:09.827332 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_147d6c35b26de94843aae2cc16def28bc6b9292bfcf7a2079ec0c049658wjps_afe709e3-dccf-4ca4-867a-a75748eb4bcc/util/0.log" Dec 10 13:36:10 crc kubenswrapper[4921]: I1210 13:36:10.040567 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_147d6c35b26de94843aae2cc16def28bc6b9292bfcf7a2079ec0c049658wjps_afe709e3-dccf-4ca4-867a-a75748eb4bcc/pull/0.log" Dec 10 13:36:10 crc kubenswrapper[4921]: I1210 13:36:10.086386 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_147d6c35b26de94843aae2cc16def28bc6b9292bfcf7a2079ec0c049658wjps_afe709e3-dccf-4ca4-867a-a75748eb4bcc/pull/0.log" Dec 10 13:36:10 crc kubenswrapper[4921]: I1210 13:36:10.108196 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_147d6c35b26de94843aae2cc16def28bc6b9292bfcf7a2079ec0c049658wjps_afe709e3-dccf-4ca4-867a-a75748eb4bcc/util/0.log" Dec 10 13:36:10 crc kubenswrapper[4921]: I1210 13:36:10.193257 4921 scope.go:117] "RemoveContainer" containerID="6faab8bcc61171add5c6212849f3f417990397c571c057a5639e195842f4d47e" Dec 10 13:36:10 crc kubenswrapper[4921]: E1210 13:36:10.193697 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-vn2n6_openshift-machine-config-operator(354355f7-6630-49a8-bdc5-5e875feecb7f)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" podUID="354355f7-6630-49a8-bdc5-5e875feecb7f" Dec 10 13:36:10 crc kubenswrapper[4921]: I1210 13:36:10.258872 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_147d6c35b26de94843aae2cc16def28bc6b9292bfcf7a2079ec0c049658wjps_afe709e3-dccf-4ca4-867a-a75748eb4bcc/util/0.log" Dec 10 13:36:10 crc kubenswrapper[4921]: I1210 13:36:10.285207 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_147d6c35b26de94843aae2cc16def28bc6b9292bfcf7a2079ec0c049658wjps_afe709e3-dccf-4ca4-867a-a75748eb4bcc/extract/0.log" Dec 10 13:36:10 crc kubenswrapper[4921]: I1210 13:36:10.328411 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_147d6c35b26de94843aae2cc16def28bc6b9292bfcf7a2079ec0c049658wjps_afe709e3-dccf-4ca4-867a-a75748eb4bcc/pull/0.log" Dec 10 13:36:10 crc kubenswrapper[4921]: I1210 13:36:10.440073 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-2jpm7_dde58658-03d9-43dc-8fe5-4be3a607934b/kube-rbac-proxy/0.log" Dec 10 13:36:10 crc kubenswrapper[4921]: I1210 13:36:10.492910 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-2jpm7_dde58658-03d9-43dc-8fe5-4be3a607934b/manager/0.log" Dec 10 13:36:10 crc kubenswrapper[4921]: I1210 13:36:10.541414 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-6c677c69b-f8zhr_488ba5bf-b48b-42f3-ba24-eba12c38a5cb/kube-rbac-proxy/0.log" Dec 10 13:36:10 crc kubenswrapper[4921]: I1210 13:36:10.658300 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-6c677c69b-f8zhr_488ba5bf-b48b-42f3-ba24-eba12c38a5cb/manager/0.log" Dec 10 13:36:10 crc kubenswrapper[4921]: I1210 13:36:10.745208 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-697fb699cf-vfzb5_b297d26d-5199-4ebf-b8ad-5ca6f5e53e86/kube-rbac-proxy/0.log" Dec 10 13:36:10 crc kubenswrapper[4921]: I1210 13:36:10.759254 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-697fb699cf-vfzb5_b297d26d-5199-4ebf-b8ad-5ca6f5e53e86/manager/0.log" Dec 10 13:36:10 crc kubenswrapper[4921]: I1210 13:36:10.974981 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-5697bb5779-xqfkp_32c622c7-cf8b-4b25-836e-c13f5c35dde1/kube-rbac-proxy/0.log" Dec 10 13:36:11 crc kubenswrapper[4921]: I1210 13:36:11.024019 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-5697bb5779-xqfkp_32c622c7-cf8b-4b25-836e-c13f5c35dde1/manager/0.log" Dec 10 13:36:11 crc kubenswrapper[4921]: I1210 13:36:11.100722 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-jbx46_1ac6c721-b338-46f7-943e-63f5db2bd354/kube-rbac-proxy/0.log" Dec 10 13:36:11 crc kubenswrapper[4921]: I1210 13:36:11.174335 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-jbx46_1ac6c721-b338-46f7-943e-63f5db2bd354/manager/0.log" Dec 10 13:36:11 crc kubenswrapper[4921]: I1210 13:36:11.240016 4921 log.go:25] "Finished parsing log 
file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-wnpbm_4f0cbb29-bf13-4073-8c9d-28da25a1fbba/kube-rbac-proxy/0.log" Dec 10 13:36:11 crc kubenswrapper[4921]: I1210 13:36:11.319370 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-wnpbm_4f0cbb29-bf13-4073-8c9d-28da25a1fbba/manager/0.log" Dec 10 13:36:11 crc kubenswrapper[4921]: I1210 13:36:11.366884 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-78d48bff9d-7s9mj_c4b5934f-4fde-47aa-a14e-fdb6f5fe7af1/kube-rbac-proxy/0.log" Dec 10 13:36:11 crc kubenswrapper[4921]: I1210 13:36:11.606104 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-78d48bff9d-7s9mj_c4b5934f-4fde-47aa-a14e-fdb6f5fe7af1/manager/0.log" Dec 10 13:36:11 crc kubenswrapper[4921]: I1210 13:36:11.630198 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-967d97867-gxd4b_5c6aaa2e-a82b-4d95-b7fc-bfa5eee026c8/kube-rbac-proxy/0.log" Dec 10 13:36:11 crc kubenswrapper[4921]: I1210 13:36:11.639069 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-967d97867-gxd4b_5c6aaa2e-a82b-4d95-b7fc-bfa5eee026c8/manager/0.log" Dec 10 13:36:11 crc kubenswrapper[4921]: I1210 13:36:11.789137 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7765d96ddf-mf85l_83057698-5071-4487-9ed1-3649fe298d00/kube-rbac-proxy/0.log" Dec 10 13:36:11 crc kubenswrapper[4921]: I1210 13:36:11.888359 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7765d96ddf-mf85l_83057698-5071-4487-9ed1-3649fe298d00/manager/0.log" Dec 10 13:36:12 crc kubenswrapper[4921]: I1210 13:36:12.008159 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-5b5fd79c9c-q5sxl_30d38f66-817f-4412-8e03-9c55d0417ace/kube-rbac-proxy/0.log" Dec 10 13:36:12 crc kubenswrapper[4921]: I1210 13:36:12.032534 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-5b5fd79c9c-q5sxl_30d38f66-817f-4412-8e03-9c55d0417ace/manager/0.log" Dec 10 13:36:12 crc kubenswrapper[4921]: I1210 13:36:12.138048 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-79c8c4686c-94t86_a0e4b9fa-671d-45a9-93e9-078eb23843db/kube-rbac-proxy/0.log" Dec 10 13:36:12 crc kubenswrapper[4921]: I1210 13:36:12.204179 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-79c8c4686c-94t86_a0e4b9fa-671d-45a9-93e9-078eb23843db/manager/0.log" Dec 10 13:36:12 crc kubenswrapper[4921]: I1210 13:36:12.365335 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-xzrb8_22100473-2036-47d3-846e-b9e351b7d7e1/kube-rbac-proxy/0.log" Dec 10 13:36:12 crc kubenswrapper[4921]: I1210 13:36:12.425460 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-xzrb8_22100473-2036-47d3-846e-b9e351b7d7e1/manager/0.log" Dec 10 13:36:12 crc kubenswrapper[4921]: I1210 13:36:12.506861 4921 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-kgtwn_e452d8aa-f7b0-4bbe-9ee2-0f54854b0bad/kube-rbac-proxy/0.log" Dec 10 13:36:12 crc kubenswrapper[4921]: I1210 13:36:12.644885 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-kgtwn_e452d8aa-f7b0-4bbe-9ee2-0f54854b0bad/manager/0.log" Dec 10 13:36:12 crc kubenswrapper[4921]: I1210 13:36:12.663285 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-8zlrp_c261f893-dc59-43ba-8a28-09528971bfb1/kube-rbac-proxy/0.log" Dec 10 13:36:12 crc kubenswrapper[4921]: I1210 13:36:12.735770 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-8zlrp_c261f893-dc59-43ba-8a28-09528971bfb1/manager/0.log" Dec 10 13:36:12 crc kubenswrapper[4921]: I1210 13:36:12.843970 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-84b575879fkj985_feac9e56-73ad-4870-8306-0789acfe8a8f/kube-rbac-proxy/0.log" Dec 10 13:36:12 crc kubenswrapper[4921]: I1210 13:36:12.909266 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-84b575879fkj985_feac9e56-73ad-4870-8306-0789acfe8a8f/manager/0.log" Dec 10 13:36:13 crc kubenswrapper[4921]: I1210 13:36:13.356541 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-6k82p_122d72c1-1bef-4ecb-90bb-424d2b989b29/registry-server/0.log" Dec 10 13:36:13 crc kubenswrapper[4921]: I1210 13:36:13.419631 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-9dffbf557-rz2h5_5c70fefc-8487-4eb5-9e13-a583bb152dec/operator/0.log" Dec 10 13:36:13 crc kubenswrapper[4921]: I1210 13:36:13.736450 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-pdm9q_fc42a438-92ea-4f71-aeaa-62d388327002/kube-rbac-proxy/0.log" Dec 10 13:36:13 crc kubenswrapper[4921]: I1210 13:36:13.804696 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-pdm9q_fc42a438-92ea-4f71-aeaa-62d388327002/manager/0.log" Dec 10 13:36:13 crc kubenswrapper[4921]: I1210 13:36:13.919277 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-7bf4f7754c-bzqjz_597fc39b-b10d-4b12-af6e-ce35966daa75/manager/0.log" Dec 10 13:36:13 crc kubenswrapper[4921]: I1210 13:36:13.932410 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-hbpss_cdc476bb-a407-403e-9bbe-e2f62e0ce23b/kube-rbac-proxy/0.log" Dec 10 13:36:14 crc kubenswrapper[4921]: I1210 13:36:14.005533 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-hbpss_cdc476bb-a407-403e-9bbe-e2f62e0ce23b/manager/0.log" Dec 10 13:36:14 crc kubenswrapper[4921]: I1210 13:36:14.098833 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-668c99d594-h76ts_0c17ea5f-4baf-427a-a2ba-106bb7248194/operator/0.log" Dec 10 13:36:14 crc kubenswrapper[4921]: I1210 13:36:14.152498 4921 
log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-9d58d64bc-8879b_5c3dc67e-e2a4-426f-b365-d325af35b1b6/kube-rbac-proxy/0.log" Dec 10 13:36:14 crc kubenswrapper[4921]: I1210 13:36:14.293271 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-9d58d64bc-8879b_5c3dc67e-e2a4-426f-b365-d325af35b1b6/manager/0.log" Dec 10 13:36:14 crc kubenswrapper[4921]: I1210 13:36:14.324900 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-58d5ff84df-p7nv4_c1a2fb9e-5927-4ebf-a3f1-a13564f7c26e/kube-rbac-proxy/0.log" Dec 10 13:36:14 crc kubenswrapper[4921]: I1210 13:36:14.547165 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-58d5ff84df-p7nv4_c1a2fb9e-5927-4ebf-a3f1-a13564f7c26e/manager/0.log" Dec 10 13:36:14 crc kubenswrapper[4921]: I1210 13:36:14.599951 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-jkgbt_532d1487-1112-4341-a787-1981d7093054/kube-rbac-proxy/0.log" Dec 10 13:36:14 crc kubenswrapper[4921]: I1210 13:36:14.737817 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-jkgbt_532d1487-1112-4341-a787-1981d7093054/manager/0.log" Dec 10 13:36:14 crc kubenswrapper[4921]: I1210 13:36:14.841875 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-75944c9b7-x4zn4_d838a4f0-a117-4807-aca0-8bc00b6ad6f1/kube-rbac-proxy/0.log" Dec 10 13:36:14 crc kubenswrapper[4921]: I1210 13:36:14.845921 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-75944c9b7-x4zn4_d838a4f0-a117-4807-aca0-8bc00b6ad6f1/manager/0.log" Dec 10 13:36:19 crc kubenswrapper[4921]: I1210 13:36:19.333145 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-l6qb9"] Dec 10 13:36:19 crc kubenswrapper[4921]: E1210 13:36:19.334343 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b05cd5a6-93ea-42d2-9c9f-2774e64dacf2" containerName="container-00" Dec 10 13:36:19 crc kubenswrapper[4921]: I1210 13:36:19.334359 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="b05cd5a6-93ea-42d2-9c9f-2774e64dacf2" containerName="container-00" Dec 10 13:36:19 crc kubenswrapper[4921]: I1210 13:36:19.334628 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="b05cd5a6-93ea-42d2-9c9f-2774e64dacf2" containerName="container-00" Dec 10 13:36:19 crc kubenswrapper[4921]: I1210 13:36:19.336320 4921 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-l6qb9" Dec 10 13:36:19 crc kubenswrapper[4921]: I1210 13:36:19.353347 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-l6qb9"] Dec 10 13:36:19 crc kubenswrapper[4921]: I1210 13:36:19.466344 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gb2tq\" (UniqueName: \"kubernetes.io/projected/37e94bc8-669e-4a4d-a9f0-d3106167fe87-kube-api-access-gb2tq\") pod \"redhat-marketplace-l6qb9\" (UID: \"37e94bc8-669e-4a4d-a9f0-d3106167fe87\") " pod="openshift-marketplace/redhat-marketplace-l6qb9" Dec 10 13:36:19 crc kubenswrapper[4921]: I1210 13:36:19.466425 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/37e94bc8-669e-4a4d-a9f0-d3106167fe87-utilities\") pod \"redhat-marketplace-l6qb9\" (UID: \"37e94bc8-669e-4a4d-a9f0-d3106167fe87\") " pod="openshift-marketplace/redhat-marketplace-l6qb9" Dec 10 13:36:19 crc kubenswrapper[4921]: I1210 13:36:19.466638 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/37e94bc8-669e-4a4d-a9f0-d3106167fe87-catalog-content\") pod \"redhat-marketplace-l6qb9\" (UID: \"37e94bc8-669e-4a4d-a9f0-d3106167fe87\") " pod="openshift-marketplace/redhat-marketplace-l6qb9" Dec 10 13:36:19 crc kubenswrapper[4921]: I1210 13:36:19.568541 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gb2tq\" (UniqueName: \"kubernetes.io/projected/37e94bc8-669e-4a4d-a9f0-d3106167fe87-kube-api-access-gb2tq\") pod \"redhat-marketplace-l6qb9\" (UID: \"37e94bc8-669e-4a4d-a9f0-d3106167fe87\") " pod="openshift-marketplace/redhat-marketplace-l6qb9" Dec 10 13:36:19 crc kubenswrapper[4921]: I1210 13:36:19.568610 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/37e94bc8-669e-4a4d-a9f0-d3106167fe87-utilities\") pod \"redhat-marketplace-l6qb9\" (UID: \"37e94bc8-669e-4a4d-a9f0-d3106167fe87\") " pod="openshift-marketplace/redhat-marketplace-l6qb9" Dec 10 13:36:19 crc kubenswrapper[4921]: I1210 13:36:19.568647 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/37e94bc8-669e-4a4d-a9f0-d3106167fe87-catalog-content\") pod \"redhat-marketplace-l6qb9\" (UID: \"37e94bc8-669e-4a4d-a9f0-d3106167fe87\") " pod="openshift-marketplace/redhat-marketplace-l6qb9" Dec 10 13:36:19 crc kubenswrapper[4921]: I1210 13:36:19.569135 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/37e94bc8-669e-4a4d-a9f0-d3106167fe87-catalog-content\") pod \"redhat-marketplace-l6qb9\" (UID: \"37e94bc8-669e-4a4d-a9f0-d3106167fe87\") " pod="openshift-marketplace/redhat-marketplace-l6qb9" Dec 10 13:36:19 crc kubenswrapper[4921]: I1210 13:36:19.569778 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/37e94bc8-669e-4a4d-a9f0-d3106167fe87-utilities\") pod \"redhat-marketplace-l6qb9\" (UID: \"37e94bc8-669e-4a4d-a9f0-d3106167fe87\") " pod="openshift-marketplace/redhat-marketplace-l6qb9" Dec 10 13:36:19 crc kubenswrapper[4921]: I1210 13:36:19.602602 4921 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-gb2tq\" (UniqueName: \"kubernetes.io/projected/37e94bc8-669e-4a4d-a9f0-d3106167fe87-kube-api-access-gb2tq\") pod \"redhat-marketplace-l6qb9\" (UID: \"37e94bc8-669e-4a4d-a9f0-d3106167fe87\") " pod="openshift-marketplace/redhat-marketplace-l6qb9" Dec 10 13:36:19 crc kubenswrapper[4921]: I1210 13:36:19.670753 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-l6qb9" Dec 10 13:36:20 crc kubenswrapper[4921]: I1210 13:36:20.270598 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-l6qb9"] Dec 10 13:36:20 crc kubenswrapper[4921]: I1210 13:36:20.812179 4921 generic.go:334] "Generic (PLEG): container finished" podID="37e94bc8-669e-4a4d-a9f0-d3106167fe87" containerID="0e9e9a19f0de924bd230f55613120319ec637893a4e26cb8c9a5464b61afa2ee" exitCode=0 Dec 10 13:36:20 crc kubenswrapper[4921]: I1210 13:36:20.813190 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-l6qb9" event={"ID":"37e94bc8-669e-4a4d-a9f0-d3106167fe87","Type":"ContainerDied","Data":"0e9e9a19f0de924bd230f55613120319ec637893a4e26cb8c9a5464b61afa2ee"} Dec 10 13:36:20 crc kubenswrapper[4921]: I1210 13:36:20.813292 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-l6qb9" event={"ID":"37e94bc8-669e-4a4d-a9f0-d3106167fe87","Type":"ContainerStarted","Data":"7cf0890e3b66dbc66b1a2ec104491aa32cd582289d754c74b77946fffdb8b79e"} Dec 10 13:36:21 crc kubenswrapper[4921]: I1210 13:36:21.193458 4921 scope.go:117] "RemoveContainer" containerID="6faab8bcc61171add5c6212849f3f417990397c571c057a5639e195842f4d47e" Dec 10 13:36:21 crc kubenswrapper[4921]: E1210 13:36:21.193698 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-vn2n6_openshift-machine-config-operator(354355f7-6630-49a8-bdc5-5e875feecb7f)\"" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" podUID="354355f7-6630-49a8-bdc5-5e875feecb7f" Dec 10 13:36:22 crc kubenswrapper[4921]: I1210 13:36:22.833684 4921 generic.go:334] "Generic (PLEG): container finished" podID="37e94bc8-669e-4a4d-a9f0-d3106167fe87" containerID="b8ff80841853c67e33388bb483a4bf06f9f6f0594a7008f0ffbbbffeeedd1f99" exitCode=0 Dec 10 13:36:22 crc kubenswrapper[4921]: I1210 13:36:22.833760 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-l6qb9" event={"ID":"37e94bc8-669e-4a4d-a9f0-d3106167fe87","Type":"ContainerDied","Data":"b8ff80841853c67e33388bb483a4bf06f9f6f0594a7008f0ffbbbffeeedd1f99"} Dec 10 13:36:23 crc kubenswrapper[4921]: I1210 13:36:23.844605 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-l6qb9" event={"ID":"37e94bc8-669e-4a4d-a9f0-d3106167fe87","Type":"ContainerStarted","Data":"417675498e5c60db30fcc1f385642d647c84b263c80ea53e0b9a762897996906"} Dec 10 13:36:23 crc kubenswrapper[4921]: I1210 13:36:23.861851 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-l6qb9" podStartSLOduration=2.091715604 podStartE2EDuration="4.861832992s" podCreationTimestamp="2025-12-10 13:36:19 +0000 UTC" firstStartedPulling="2025-12-10 13:36:20.815158299 +0000 UTC m=+2378.031380213" lastFinishedPulling="2025-12-10 
13:36:23.585275677 +0000 UTC m=+2380.801497601" observedRunningTime="2025-12-10 13:36:23.860747923 +0000 UTC m=+2381.076969837" watchObservedRunningTime="2025-12-10 13:36:23.861832992 +0000 UTC m=+2381.078054916" Dec 10 13:36:29 crc kubenswrapper[4921]: I1210 13:36:29.671238 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-l6qb9" Dec 10 13:36:29 crc kubenswrapper[4921]: I1210 13:36:29.671658 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-l6qb9" Dec 10 13:36:29 crc kubenswrapper[4921]: I1210 13:36:29.729193 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-l6qb9" Dec 10 13:36:29 crc kubenswrapper[4921]: I1210 13:36:29.956961 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-l6qb9" Dec 10 13:36:30 crc kubenswrapper[4921]: I1210 13:36:30.012085 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-l6qb9"] Dec 10 13:36:31 crc kubenswrapper[4921]: I1210 13:36:31.921632 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-l6qb9" podUID="37e94bc8-669e-4a4d-a9f0-d3106167fe87" containerName="registry-server" containerID="cri-o://417675498e5c60db30fcc1f385642d647c84b263c80ea53e0b9a762897996906" gracePeriod=2 Dec 10 13:36:32 crc kubenswrapper[4921]: I1210 13:36:32.440895 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-l6qb9" Dec 10 13:36:32 crc kubenswrapper[4921]: I1210 13:36:32.576573 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/37e94bc8-669e-4a4d-a9f0-d3106167fe87-utilities\") pod \"37e94bc8-669e-4a4d-a9f0-d3106167fe87\" (UID: \"37e94bc8-669e-4a4d-a9f0-d3106167fe87\") " Dec 10 13:36:32 crc kubenswrapper[4921]: I1210 13:36:32.576654 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gb2tq\" (UniqueName: \"kubernetes.io/projected/37e94bc8-669e-4a4d-a9f0-d3106167fe87-kube-api-access-gb2tq\") pod \"37e94bc8-669e-4a4d-a9f0-d3106167fe87\" (UID: \"37e94bc8-669e-4a4d-a9f0-d3106167fe87\") " Dec 10 13:36:32 crc kubenswrapper[4921]: I1210 13:36:32.576766 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/37e94bc8-669e-4a4d-a9f0-d3106167fe87-catalog-content\") pod \"37e94bc8-669e-4a4d-a9f0-d3106167fe87\" (UID: \"37e94bc8-669e-4a4d-a9f0-d3106167fe87\") " Dec 10 13:36:32 crc kubenswrapper[4921]: I1210 13:36:32.578186 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/37e94bc8-669e-4a4d-a9f0-d3106167fe87-utilities" (OuterVolumeSpecName: "utilities") pod "37e94bc8-669e-4a4d-a9f0-d3106167fe87" (UID: "37e94bc8-669e-4a4d-a9f0-d3106167fe87"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 13:36:32 crc kubenswrapper[4921]: I1210 13:36:32.581653 4921 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/37e94bc8-669e-4a4d-a9f0-d3106167fe87-utilities\") on node \"crc\" DevicePath \"\"" Dec 10 13:36:32 crc kubenswrapper[4921]: I1210 13:36:32.586615 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/37e94bc8-669e-4a4d-a9f0-d3106167fe87-kube-api-access-gb2tq" (OuterVolumeSpecName: "kube-api-access-gb2tq") pod "37e94bc8-669e-4a4d-a9f0-d3106167fe87" (UID: "37e94bc8-669e-4a4d-a9f0-d3106167fe87"). InnerVolumeSpecName "kube-api-access-gb2tq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 13:36:32 crc kubenswrapper[4921]: I1210 13:36:32.596597 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/37e94bc8-669e-4a4d-a9f0-d3106167fe87-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "37e94bc8-669e-4a4d-a9f0-d3106167fe87" (UID: "37e94bc8-669e-4a4d-a9f0-d3106167fe87"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 13:36:32 crc kubenswrapper[4921]: I1210 13:36:32.683767 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gb2tq\" (UniqueName: \"kubernetes.io/projected/37e94bc8-669e-4a4d-a9f0-d3106167fe87-kube-api-access-gb2tq\") on node \"crc\" DevicePath \"\"" Dec 10 13:36:32 crc kubenswrapper[4921]: I1210 13:36:32.683803 4921 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/37e94bc8-669e-4a4d-a9f0-d3106167fe87-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 10 13:36:32 crc kubenswrapper[4921]: I1210 13:36:32.930131 4921 generic.go:334] "Generic (PLEG): container finished" podID="37e94bc8-669e-4a4d-a9f0-d3106167fe87" containerID="417675498e5c60db30fcc1f385642d647c84b263c80ea53e0b9a762897996906" exitCode=0 Dec 10 13:36:32 crc kubenswrapper[4921]: I1210 13:36:32.930172 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-l6qb9" event={"ID":"37e94bc8-669e-4a4d-a9f0-d3106167fe87","Type":"ContainerDied","Data":"417675498e5c60db30fcc1f385642d647c84b263c80ea53e0b9a762897996906"} Dec 10 13:36:32 crc kubenswrapper[4921]: I1210 13:36:32.930183 4921 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-l6qb9" Dec 10 13:36:32 crc kubenswrapper[4921]: I1210 13:36:32.930197 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-l6qb9" event={"ID":"37e94bc8-669e-4a4d-a9f0-d3106167fe87","Type":"ContainerDied","Data":"7cf0890e3b66dbc66b1a2ec104491aa32cd582289d754c74b77946fffdb8b79e"} Dec 10 13:36:32 crc kubenswrapper[4921]: I1210 13:36:32.930213 4921 scope.go:117] "RemoveContainer" containerID="417675498e5c60db30fcc1f385642d647c84b263c80ea53e0b9a762897996906" Dec 10 13:36:32 crc kubenswrapper[4921]: I1210 13:36:32.957069 4921 scope.go:117] "RemoveContainer" containerID="b8ff80841853c67e33388bb483a4bf06f9f6f0594a7008f0ffbbbffeeedd1f99" Dec 10 13:36:32 crc kubenswrapper[4921]: I1210 13:36:32.959974 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-l6qb9"] Dec 10 13:36:32 crc kubenswrapper[4921]: I1210 13:36:32.967426 4921 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-l6qb9"] Dec 10 13:36:32 crc kubenswrapper[4921]: I1210 13:36:32.974576 4921 scope.go:117] "RemoveContainer" containerID="0e9e9a19f0de924bd230f55613120319ec637893a4e26cb8c9a5464b61afa2ee" Dec 10 13:36:33 crc kubenswrapper[4921]: I1210 13:36:33.014100 4921 scope.go:117] "RemoveContainer" containerID="417675498e5c60db30fcc1f385642d647c84b263c80ea53e0b9a762897996906" Dec 10 13:36:33 crc kubenswrapper[4921]: E1210 13:36:33.014876 4921 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"417675498e5c60db30fcc1f385642d647c84b263c80ea53e0b9a762897996906\": container with ID starting with 417675498e5c60db30fcc1f385642d647c84b263c80ea53e0b9a762897996906 not found: ID does not exist" containerID="417675498e5c60db30fcc1f385642d647c84b263c80ea53e0b9a762897996906" Dec 10 13:36:33 crc kubenswrapper[4921]: I1210 13:36:33.014992 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"417675498e5c60db30fcc1f385642d647c84b263c80ea53e0b9a762897996906"} err="failed to get container status \"417675498e5c60db30fcc1f385642d647c84b263c80ea53e0b9a762897996906\": rpc error: code = NotFound desc = could not find container \"417675498e5c60db30fcc1f385642d647c84b263c80ea53e0b9a762897996906\": container with ID starting with 417675498e5c60db30fcc1f385642d647c84b263c80ea53e0b9a762897996906 not found: ID does not exist" Dec 10 13:36:33 crc kubenswrapper[4921]: I1210 13:36:33.015091 4921 scope.go:117] "RemoveContainer" containerID="b8ff80841853c67e33388bb483a4bf06f9f6f0594a7008f0ffbbbffeeedd1f99" Dec 10 13:36:33 crc kubenswrapper[4921]: E1210 13:36:33.015532 4921 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b8ff80841853c67e33388bb483a4bf06f9f6f0594a7008f0ffbbbffeeedd1f99\": container with ID starting with b8ff80841853c67e33388bb483a4bf06f9f6f0594a7008f0ffbbbffeeedd1f99 not found: ID does not exist" containerID="b8ff80841853c67e33388bb483a4bf06f9f6f0594a7008f0ffbbbffeeedd1f99" Dec 10 13:36:33 crc kubenswrapper[4921]: I1210 13:36:33.015593 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b8ff80841853c67e33388bb483a4bf06f9f6f0594a7008f0ffbbbffeeedd1f99"} err="failed to get container status \"b8ff80841853c67e33388bb483a4bf06f9f6f0594a7008f0ffbbbffeeedd1f99\": rpc error: code = NotFound desc = could not find 
container \"b8ff80841853c67e33388bb483a4bf06f9f6f0594a7008f0ffbbbffeeedd1f99\": container with ID starting with b8ff80841853c67e33388bb483a4bf06f9f6f0594a7008f0ffbbbffeeedd1f99 not found: ID does not exist" Dec 10 13:36:33 crc kubenswrapper[4921]: I1210 13:36:33.015638 4921 scope.go:117] "RemoveContainer" containerID="0e9e9a19f0de924bd230f55613120319ec637893a4e26cb8c9a5464b61afa2ee" Dec 10 13:36:33 crc kubenswrapper[4921]: E1210 13:36:33.015957 4921 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0e9e9a19f0de924bd230f55613120319ec637893a4e26cb8c9a5464b61afa2ee\": container with ID starting with 0e9e9a19f0de924bd230f55613120319ec637893a4e26cb8c9a5464b61afa2ee not found: ID does not exist" containerID="0e9e9a19f0de924bd230f55613120319ec637893a4e26cb8c9a5464b61afa2ee" Dec 10 13:36:33 crc kubenswrapper[4921]: I1210 13:36:33.015981 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0e9e9a19f0de924bd230f55613120319ec637893a4e26cb8c9a5464b61afa2ee"} err="failed to get container status \"0e9e9a19f0de924bd230f55613120319ec637893a4e26cb8c9a5464b61afa2ee\": rpc error: code = NotFound desc = could not find container \"0e9e9a19f0de924bd230f55613120319ec637893a4e26cb8c9a5464b61afa2ee\": container with ID starting with 0e9e9a19f0de924bd230f55613120319ec637893a4e26cb8c9a5464b61afa2ee not found: ID does not exist" Dec 10 13:36:33 crc kubenswrapper[4921]: I1210 13:36:33.197885 4921 scope.go:117] "RemoveContainer" containerID="6faab8bcc61171add5c6212849f3f417990397c571c057a5639e195842f4d47e" Dec 10 13:36:33 crc kubenswrapper[4921]: E1210 13:36:33.198746 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-vn2n6_openshift-machine-config-operator(354355f7-6630-49a8-bdc5-5e875feecb7f)\"" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" podUID="354355f7-6630-49a8-bdc5-5e875feecb7f" Dec 10 13:36:33 crc kubenswrapper[4921]: I1210 13:36:33.202566 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="37e94bc8-669e-4a4d-a9f0-d3106167fe87" path="/var/lib/kubelet/pods/37e94bc8-669e-4a4d-a9f0-d3106167fe87/volumes" Dec 10 13:36:35 crc kubenswrapper[4921]: I1210 13:36:35.613118 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-w8czv_ddb1c1c4-6c20-4fab-ba30-4d74c96ad4f4/control-plane-machine-set-operator/0.log" Dec 10 13:36:35 crc kubenswrapper[4921]: I1210 13:36:35.840057 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-jwb9f_9c7fe3ab-dc6a-44fb-9c30-070f46cfd0f4/kube-rbac-proxy/0.log" Dec 10 13:36:35 crc kubenswrapper[4921]: I1210 13:36:35.874753 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-jwb9f_9c7fe3ab-dc6a-44fb-9c30-070f46cfd0f4/machine-api-operator/0.log" Dec 10 13:36:46 crc kubenswrapper[4921]: I1210 13:36:46.192907 4921 scope.go:117] "RemoveContainer" containerID="6faab8bcc61171add5c6212849f3f417990397c571c057a5639e195842f4d47e" Dec 10 13:36:46 crc kubenswrapper[4921]: E1210 13:36:46.193620 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s 
restarting failed container=machine-config-daemon pod=machine-config-daemon-vn2n6_openshift-machine-config-operator(354355f7-6630-49a8-bdc5-5e875feecb7f)\"" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" podUID="354355f7-6630-49a8-bdc5-5e875feecb7f" Dec 10 13:36:47 crc kubenswrapper[4921]: I1210 13:36:47.427853 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-cxncf_92834ade-8e0b-4bc4-82e4-7ca664e21681/cert-manager-controller/0.log" Dec 10 13:36:47 crc kubenswrapper[4921]: I1210 13:36:47.530569 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-mwqs8_edd2568d-f1f7-4327-8e90-769d0598adca/cert-manager-cainjector/0.log" Dec 10 13:36:47 crc kubenswrapper[4921]: I1210 13:36:47.665978 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-bl74c_e4f33fbe-8180-499a-812d-8473d0178c72/cert-manager-webhook/0.log" Dec 10 13:36:59 crc kubenswrapper[4921]: I1210 13:36:59.199775 4921 scope.go:117] "RemoveContainer" containerID="6faab8bcc61171add5c6212849f3f417990397c571c057a5639e195842f4d47e" Dec 10 13:36:59 crc kubenswrapper[4921]: E1210 13:36:59.200517 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-vn2n6_openshift-machine-config-operator(354355f7-6630-49a8-bdc5-5e875feecb7f)\"" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" podUID="354355f7-6630-49a8-bdc5-5e875feecb7f" Dec 10 13:37:00 crc kubenswrapper[4921]: I1210 13:37:00.686524 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-7fbb5f6569-qsr5j_a01c2903-c6f2-4490-9211-07ffc7af0431/nmstate-console-plugin/0.log" Dec 10 13:37:00 crc kubenswrapper[4921]: I1210 13:37:00.775499 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-zb8qp_5c585358-c7f8-430a-a95e-56dfbf0e07e9/nmstate-handler/0.log" Dec 10 13:37:00 crc kubenswrapper[4921]: I1210 13:37:00.873232 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-2gzh4_173dac82-56c2-4045-aa84-52d6531c6a0a/kube-rbac-proxy/0.log" Dec 10 13:37:00 crc kubenswrapper[4921]: I1210 13:37:00.983860 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-2gzh4_173dac82-56c2-4045-aa84-52d6531c6a0a/nmstate-metrics/0.log" Dec 10 13:37:01 crc kubenswrapper[4921]: I1210 13:37:01.115650 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-5b5b58f5c8-slfqc_c580327c-ba4c-4336-b0b2-81d2303b967d/nmstate-operator/0.log" Dec 10 13:37:01 crc kubenswrapper[4921]: I1210 13:37:01.208044 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-5f6d4c5ccb-sx7mt_18a6708a-0f88-4b00-95ca-18199f78a88e/nmstate-webhook/0.log" Dec 10 13:37:10 crc kubenswrapper[4921]: I1210 13:37:10.193533 4921 scope.go:117] "RemoveContainer" containerID="6faab8bcc61171add5c6212849f3f417990397c571c057a5639e195842f4d47e" Dec 10 13:37:10 crc kubenswrapper[4921]: E1210 13:37:10.194408 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-vn2n6_openshift-machine-config-operator(354355f7-6630-49a8-bdc5-5e875feecb7f)\"" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" podUID="354355f7-6630-49a8-bdc5-5e875feecb7f" Dec 10 13:37:17 crc kubenswrapper[4921]: I1210 13:37:17.035170 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-spclt_182ccfa3-c32b-4dca-9876-770f89d8eda5/controller/0.log" Dec 10 13:37:17 crc kubenswrapper[4921]: I1210 13:37:17.147937 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-spclt_182ccfa3-c32b-4dca-9876-770f89d8eda5/kube-rbac-proxy/0.log" Dec 10 13:37:17 crc kubenswrapper[4921]: I1210 13:37:17.340545 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-gbfb5_aefe835b-88a0-4bfc-9358-601b13418414/cp-frr-files/0.log" Dec 10 13:37:17 crc kubenswrapper[4921]: I1210 13:37:17.571461 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-gbfb5_aefe835b-88a0-4bfc-9358-601b13418414/cp-reloader/0.log" Dec 10 13:37:17 crc kubenswrapper[4921]: I1210 13:37:17.640275 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-gbfb5_aefe835b-88a0-4bfc-9358-601b13418414/cp-reloader/0.log" Dec 10 13:37:17 crc kubenswrapper[4921]: I1210 13:37:17.661155 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-gbfb5_aefe835b-88a0-4bfc-9358-601b13418414/cp-metrics/0.log" Dec 10 13:37:17 crc kubenswrapper[4921]: I1210 13:37:17.720268 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-gbfb5_aefe835b-88a0-4bfc-9358-601b13418414/cp-frr-files/0.log" Dec 10 13:37:17 crc kubenswrapper[4921]: I1210 13:37:17.819673 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-gbfb5_aefe835b-88a0-4bfc-9358-601b13418414/cp-frr-files/0.log" Dec 10 13:37:17 crc kubenswrapper[4921]: I1210 13:37:17.871722 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-gbfb5_aefe835b-88a0-4bfc-9358-601b13418414/cp-reloader/0.log" Dec 10 13:37:17 crc kubenswrapper[4921]: I1210 13:37:17.877907 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-gbfb5_aefe835b-88a0-4bfc-9358-601b13418414/cp-metrics/0.log" Dec 10 13:37:17 crc kubenswrapper[4921]: I1210 13:37:17.956136 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-gbfb5_aefe835b-88a0-4bfc-9358-601b13418414/cp-metrics/0.log" Dec 10 13:37:18 crc kubenswrapper[4921]: I1210 13:37:18.136767 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-gbfb5_aefe835b-88a0-4bfc-9358-601b13418414/cp-reloader/0.log" Dec 10 13:37:18 crc kubenswrapper[4921]: I1210 13:37:18.178290 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-gbfb5_aefe835b-88a0-4bfc-9358-601b13418414/cp-frr-files/0.log" Dec 10 13:37:18 crc kubenswrapper[4921]: I1210 13:37:18.183583 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-gbfb5_aefe835b-88a0-4bfc-9358-601b13418414/cp-metrics/0.log" Dec 10 13:37:18 crc kubenswrapper[4921]: I1210 13:37:18.247089 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-gbfb5_aefe835b-88a0-4bfc-9358-601b13418414/controller/0.log" Dec 10 13:37:18 crc kubenswrapper[4921]: I1210 13:37:18.432718 4921 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_frr-k8s-gbfb5_aefe835b-88a0-4bfc-9358-601b13418414/frr-metrics/0.log" Dec 10 13:37:18 crc kubenswrapper[4921]: I1210 13:37:18.444532 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-gbfb5_aefe835b-88a0-4bfc-9358-601b13418414/kube-rbac-proxy/0.log" Dec 10 13:37:18 crc kubenswrapper[4921]: I1210 13:37:18.603654 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-gbfb5_aefe835b-88a0-4bfc-9358-601b13418414/kube-rbac-proxy-frr/0.log" Dec 10 13:37:18 crc kubenswrapper[4921]: I1210 13:37:18.838798 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-gbfb5_aefe835b-88a0-4bfc-9358-601b13418414/reloader/0.log" Dec 10 13:37:19 crc kubenswrapper[4921]: I1210 13:37:19.032970 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7fcb986d4-kpktr_7421a4d2-6416-4ad9-b177-6644f804b950/frr-k8s-webhook-server/0.log" Dec 10 13:37:19 crc kubenswrapper[4921]: I1210 13:37:19.105760 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-dcc9644d6-vngw7_6344fb01-da13-44f0-997e-8995611d7d49/manager/0.log" Dec 10 13:37:19 crc kubenswrapper[4921]: I1210 13:37:19.336087 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-gbfb5_aefe835b-88a0-4bfc-9358-601b13418414/frr/0.log" Dec 10 13:37:19 crc kubenswrapper[4921]: I1210 13:37:19.386004 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-55b8dfdccf-g755g_be00220d-bc15-4e8f-95be-44043850aac1/webhook-server/0.log" Dec 10 13:37:19 crc kubenswrapper[4921]: I1210 13:37:19.499353 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-nstdr_9a1c6903-fed0-48be-b83c-ec8416ee2204/kube-rbac-proxy/0.log" Dec 10 13:37:19 crc kubenswrapper[4921]: I1210 13:37:19.736173 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-nstdr_9a1c6903-fed0-48be-b83c-ec8416ee2204/speaker/0.log" Dec 10 13:37:22 crc kubenswrapper[4921]: I1210 13:37:22.194918 4921 scope.go:117] "RemoveContainer" containerID="6faab8bcc61171add5c6212849f3f417990397c571c057a5639e195842f4d47e" Dec 10 13:37:22 crc kubenswrapper[4921]: E1210 13:37:22.195474 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-vn2n6_openshift-machine-config-operator(354355f7-6630-49a8-bdc5-5e875feecb7f)\"" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" podUID="354355f7-6630-49a8-bdc5-5e875feecb7f" Dec 10 13:37:33 crc kubenswrapper[4921]: I1210 13:37:33.362216 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fksrxf_e0c8a533-3e1a-4c58-b97a-d9054e2d6476/util/0.log" Dec 10 13:37:33 crc kubenswrapper[4921]: I1210 13:37:33.499498 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fksrxf_e0c8a533-3e1a-4c58-b97a-d9054e2d6476/pull/0.log" Dec 10 13:37:33 crc kubenswrapper[4921]: I1210 13:37:33.574274 4921 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fksrxf_e0c8a533-3e1a-4c58-b97a-d9054e2d6476/util/0.log" Dec 10 13:37:33 crc kubenswrapper[4921]: I1210 13:37:33.666550 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fksrxf_e0c8a533-3e1a-4c58-b97a-d9054e2d6476/pull/0.log" Dec 10 13:37:33 crc kubenswrapper[4921]: I1210 13:37:33.819830 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fksrxf_e0c8a533-3e1a-4c58-b97a-d9054e2d6476/util/0.log" Dec 10 13:37:33 crc kubenswrapper[4921]: I1210 13:37:33.827938 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fksrxf_e0c8a533-3e1a-4c58-b97a-d9054e2d6476/pull/0.log" Dec 10 13:37:33 crc kubenswrapper[4921]: I1210 13:37:33.843570 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fksrxf_e0c8a533-3e1a-4c58-b97a-d9054e2d6476/extract/0.log" Dec 10 13:37:34 crc kubenswrapper[4921]: I1210 13:37:34.006304 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83psbdz_1d3566b0-e028-4148-a7be-04cf455a999c/util/0.log" Dec 10 13:37:34 crc kubenswrapper[4921]: I1210 13:37:34.235305 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83psbdz_1d3566b0-e028-4148-a7be-04cf455a999c/pull/0.log" Dec 10 13:37:34 crc kubenswrapper[4921]: I1210 13:37:34.290862 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83psbdz_1d3566b0-e028-4148-a7be-04cf455a999c/util/0.log" Dec 10 13:37:34 crc kubenswrapper[4921]: I1210 13:37:34.308229 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83psbdz_1d3566b0-e028-4148-a7be-04cf455a999c/pull/0.log" Dec 10 13:37:34 crc kubenswrapper[4921]: I1210 13:37:34.447942 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83psbdz_1d3566b0-e028-4148-a7be-04cf455a999c/util/0.log" Dec 10 13:37:34 crc kubenswrapper[4921]: I1210 13:37:34.482517 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83psbdz_1d3566b0-e028-4148-a7be-04cf455a999c/pull/0.log" Dec 10 13:37:34 crc kubenswrapper[4921]: I1210 13:37:34.504083 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83psbdz_1d3566b0-e028-4148-a7be-04cf455a999c/extract/0.log" Dec 10 13:37:34 crc kubenswrapper[4921]: I1210 13:37:34.642193 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-2zhq8_b7920f09-f65c-42f5-8009-6f82728acd77/extract-utilities/0.log" Dec 10 13:37:34 crc kubenswrapper[4921]: I1210 13:37:34.836852 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-2zhq8_b7920f09-f65c-42f5-8009-6f82728acd77/extract-content/0.log" Dec 10 13:37:34 crc 
kubenswrapper[4921]: I1210 13:37:34.858158 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-2zhq8_b7920f09-f65c-42f5-8009-6f82728acd77/extract-utilities/0.log" Dec 10 13:37:34 crc kubenswrapper[4921]: I1210 13:37:34.903965 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-2zhq8_b7920f09-f65c-42f5-8009-6f82728acd77/extract-content/0.log" Dec 10 13:37:35 crc kubenswrapper[4921]: I1210 13:37:35.075743 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-2zhq8_b7920f09-f65c-42f5-8009-6f82728acd77/extract-utilities/0.log" Dec 10 13:37:35 crc kubenswrapper[4921]: I1210 13:37:35.088960 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-2zhq8_b7920f09-f65c-42f5-8009-6f82728acd77/extract-content/0.log" Dec 10 13:37:35 crc kubenswrapper[4921]: I1210 13:37:35.178841 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-2zhq8_b7920f09-f65c-42f5-8009-6f82728acd77/registry-server/0.log" Dec 10 13:37:35 crc kubenswrapper[4921]: I1210 13:37:35.281452 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-5gwtb_26640aa3-6e2c-4c26-a797-ebcc8cbdbdec/extract-utilities/0.log" Dec 10 13:37:35 crc kubenswrapper[4921]: I1210 13:37:35.454663 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-5gwtb_26640aa3-6e2c-4c26-a797-ebcc8cbdbdec/extract-utilities/0.log" Dec 10 13:37:35 crc kubenswrapper[4921]: I1210 13:37:35.495639 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-5gwtb_26640aa3-6e2c-4c26-a797-ebcc8cbdbdec/extract-content/0.log" Dec 10 13:37:35 crc kubenswrapper[4921]: I1210 13:37:35.570347 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-5gwtb_26640aa3-6e2c-4c26-a797-ebcc8cbdbdec/extract-content/0.log" Dec 10 13:37:35 crc kubenswrapper[4921]: I1210 13:37:35.683141 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-5gwtb_26640aa3-6e2c-4c26-a797-ebcc8cbdbdec/extract-content/0.log" Dec 10 13:37:35 crc kubenswrapper[4921]: I1210 13:37:35.737224 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-5gwtb_26640aa3-6e2c-4c26-a797-ebcc8cbdbdec/extract-utilities/0.log" Dec 10 13:37:36 crc kubenswrapper[4921]: I1210 13:37:36.068266 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-5gwtb_26640aa3-6e2c-4c26-a797-ebcc8cbdbdec/registry-server/0.log" Dec 10 13:37:36 crc kubenswrapper[4921]: I1210 13:37:36.077533 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-pwjmn_7a4e394f-d2a7-4f32-8da5-a2a221b267ea/marketplace-operator/0.log" Dec 10 13:37:36 crc kubenswrapper[4921]: I1210 13:37:36.162835 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-hs22j_20b7355a-9d7b-46c3-be68-346748a6d5d7/extract-utilities/0.log" Dec 10 13:37:36 crc kubenswrapper[4921]: I1210 13:37:36.195152 4921 scope.go:117] "RemoveContainer" containerID="6faab8bcc61171add5c6212849f3f417990397c571c057a5639e195842f4d47e" Dec 10 13:37:36 crc kubenswrapper[4921]: E1210 13:37:36.195411 4921 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-vn2n6_openshift-machine-config-operator(354355f7-6630-49a8-bdc5-5e875feecb7f)\"" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" podUID="354355f7-6630-49a8-bdc5-5e875feecb7f" Dec 10 13:37:36 crc kubenswrapper[4921]: I1210 13:37:36.405279 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-hs22j_20b7355a-9d7b-46c3-be68-346748a6d5d7/extract-content/0.log" Dec 10 13:37:36 crc kubenswrapper[4921]: I1210 13:37:36.432907 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-hs22j_20b7355a-9d7b-46c3-be68-346748a6d5d7/extract-utilities/0.log" Dec 10 13:37:36 crc kubenswrapper[4921]: I1210 13:37:36.465972 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-hs22j_20b7355a-9d7b-46c3-be68-346748a6d5d7/extract-content/0.log" Dec 10 13:37:36 crc kubenswrapper[4921]: I1210 13:37:36.661603 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-hs22j_20b7355a-9d7b-46c3-be68-346748a6d5d7/extract-utilities/0.log" Dec 10 13:37:36 crc kubenswrapper[4921]: I1210 13:37:36.729821 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-hs22j_20b7355a-9d7b-46c3-be68-346748a6d5d7/extract-content/0.log" Dec 10 13:37:36 crc kubenswrapper[4921]: I1210 13:37:36.815732 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-jc7wv_4d637dcb-0378-4034-84f1-8e92718180c3/extract-utilities/0.log" Dec 10 13:37:36 crc kubenswrapper[4921]: I1210 13:37:36.935632 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-hs22j_20b7355a-9d7b-46c3-be68-346748a6d5d7/registry-server/0.log" Dec 10 13:37:37 crc kubenswrapper[4921]: I1210 13:37:37.098031 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-jc7wv_4d637dcb-0378-4034-84f1-8e92718180c3/extract-content/0.log" Dec 10 13:37:37 crc kubenswrapper[4921]: I1210 13:37:37.098501 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-jc7wv_4d637dcb-0378-4034-84f1-8e92718180c3/extract-utilities/0.log" Dec 10 13:37:37 crc kubenswrapper[4921]: I1210 13:37:37.142018 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-jc7wv_4d637dcb-0378-4034-84f1-8e92718180c3/extract-content/0.log" Dec 10 13:37:37 crc kubenswrapper[4921]: I1210 13:37:37.347496 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-jc7wv_4d637dcb-0378-4034-84f1-8e92718180c3/extract-content/0.log" Dec 10 13:37:37 crc kubenswrapper[4921]: I1210 13:37:37.372384 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-jc7wv_4d637dcb-0378-4034-84f1-8e92718180c3/extract-utilities/0.log" Dec 10 13:37:37 crc kubenswrapper[4921]: I1210 13:37:37.703575 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-jc7wv_4d637dcb-0378-4034-84f1-8e92718180c3/registry-server/0.log" Dec 10 13:37:49 crc kubenswrapper[4921]: I1210 13:37:49.194808 4921 scope.go:117] 
"RemoveContainer" containerID="6faab8bcc61171add5c6212849f3f417990397c571c057a5639e195842f4d47e" Dec 10 13:37:49 crc kubenswrapper[4921]: E1210 13:37:49.195566 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-vn2n6_openshift-machine-config-operator(354355f7-6630-49a8-bdc5-5e875feecb7f)\"" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" podUID="354355f7-6630-49a8-bdc5-5e875feecb7f" Dec 10 13:38:04 crc kubenswrapper[4921]: I1210 13:38:04.192908 4921 scope.go:117] "RemoveContainer" containerID="6faab8bcc61171add5c6212849f3f417990397c571c057a5639e195842f4d47e" Dec 10 13:38:04 crc kubenswrapper[4921]: E1210 13:38:04.193714 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-vn2n6_openshift-machine-config-operator(354355f7-6630-49a8-bdc5-5e875feecb7f)\"" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" podUID="354355f7-6630-49a8-bdc5-5e875feecb7f" Dec 10 13:38:18 crc kubenswrapper[4921]: I1210 13:38:18.193677 4921 scope.go:117] "RemoveContainer" containerID="6faab8bcc61171add5c6212849f3f417990397c571c057a5639e195842f4d47e" Dec 10 13:38:18 crc kubenswrapper[4921]: E1210 13:38:18.194500 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-vn2n6_openshift-machine-config-operator(354355f7-6630-49a8-bdc5-5e875feecb7f)\"" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" podUID="354355f7-6630-49a8-bdc5-5e875feecb7f" Dec 10 13:38:31 crc kubenswrapper[4921]: I1210 13:38:31.192140 4921 scope.go:117] "RemoveContainer" containerID="6faab8bcc61171add5c6212849f3f417990397c571c057a5639e195842f4d47e" Dec 10 13:38:31 crc kubenswrapper[4921]: E1210 13:38:31.192830 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-vn2n6_openshift-machine-config-operator(354355f7-6630-49a8-bdc5-5e875feecb7f)\"" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" podUID="354355f7-6630-49a8-bdc5-5e875feecb7f" Dec 10 13:38:45 crc kubenswrapper[4921]: I1210 13:38:45.198087 4921 scope.go:117] "RemoveContainer" containerID="6faab8bcc61171add5c6212849f3f417990397c571c057a5639e195842f4d47e" Dec 10 13:38:45 crc kubenswrapper[4921]: E1210 13:38:45.198801 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-vn2n6_openshift-machine-config-operator(354355f7-6630-49a8-bdc5-5e875feecb7f)\"" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" podUID="354355f7-6630-49a8-bdc5-5e875feecb7f" Dec 10 13:39:00 crc kubenswrapper[4921]: I1210 13:39:00.193352 4921 scope.go:117] "RemoveContainer" containerID="6faab8bcc61171add5c6212849f3f417990397c571c057a5639e195842f4d47e" Dec 10 13:39:00 crc kubenswrapper[4921]: E1210 13:39:00.194184 4921 pod_workers.go:1301] "Error 
syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-vn2n6_openshift-machine-config-operator(354355f7-6630-49a8-bdc5-5e875feecb7f)\"" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" podUID="354355f7-6630-49a8-bdc5-5e875feecb7f" Dec 10 13:39:15 crc kubenswrapper[4921]: I1210 13:39:15.201805 4921 scope.go:117] "RemoveContainer" containerID="6faab8bcc61171add5c6212849f3f417990397c571c057a5639e195842f4d47e" Dec 10 13:39:15 crc kubenswrapper[4921]: E1210 13:39:15.202792 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-vn2n6_openshift-machine-config-operator(354355f7-6630-49a8-bdc5-5e875feecb7f)\"" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" podUID="354355f7-6630-49a8-bdc5-5e875feecb7f" Dec 10 13:39:25 crc kubenswrapper[4921]: I1210 13:39:25.054072 4921 generic.go:334] "Generic (PLEG): container finished" podID="ebe87d92-7e4e-4963-a2af-320f7b71dec3" containerID="27755b22769497f398cb97dd8aca2ca2f919c7254988d33e37624f705f879b62" exitCode=0 Dec 10 13:39:25 crc kubenswrapper[4921]: I1210 13:39:25.054145 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-66zf6/must-gather-ln95p" event={"ID":"ebe87d92-7e4e-4963-a2af-320f7b71dec3","Type":"ContainerDied","Data":"27755b22769497f398cb97dd8aca2ca2f919c7254988d33e37624f705f879b62"} Dec 10 13:39:25 crc kubenswrapper[4921]: I1210 13:39:25.055839 4921 scope.go:117] "RemoveContainer" containerID="27755b22769497f398cb97dd8aca2ca2f919c7254988d33e37624f705f879b62" Dec 10 13:39:25 crc kubenswrapper[4921]: I1210 13:39:25.850950 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-66zf6_must-gather-ln95p_ebe87d92-7e4e-4963-a2af-320f7b71dec3/gather/0.log" Dec 10 13:39:30 crc kubenswrapper[4921]: I1210 13:39:30.192474 4921 scope.go:117] "RemoveContainer" containerID="6faab8bcc61171add5c6212849f3f417990397c571c057a5639e195842f4d47e" Dec 10 13:39:30 crc kubenswrapper[4921]: E1210 13:39:30.193174 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-vn2n6_openshift-machine-config-operator(354355f7-6630-49a8-bdc5-5e875feecb7f)\"" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" podUID="354355f7-6630-49a8-bdc5-5e875feecb7f" Dec 10 13:39:34 crc kubenswrapper[4921]: I1210 13:39:34.340026 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-66zf6/must-gather-ln95p"] Dec 10 13:39:34 crc kubenswrapper[4921]: I1210 13:39:34.340791 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-66zf6/must-gather-ln95p" podUID="ebe87d92-7e4e-4963-a2af-320f7b71dec3" containerName="copy" containerID="cri-o://b76388c563016ad17d394b1f34e18912abda547fbe3e735f4d0bac6cfc7ef140" gracePeriod=2 Dec 10 13:39:34 crc kubenswrapper[4921]: I1210 13:39:34.360827 4921 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-66zf6/must-gather-ln95p"] Dec 10 13:39:34 crc kubenswrapper[4921]: I1210 13:39:34.751432 4921 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-must-gather-66zf6_must-gather-ln95p_ebe87d92-7e4e-4963-a2af-320f7b71dec3/copy/0.log" Dec 10 13:39:34 crc kubenswrapper[4921]: I1210 13:39:34.752247 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-66zf6/must-gather-ln95p" Dec 10 13:39:34 crc kubenswrapper[4921]: I1210 13:39:34.860298 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rpkd5\" (UniqueName: \"kubernetes.io/projected/ebe87d92-7e4e-4963-a2af-320f7b71dec3-kube-api-access-rpkd5\") pod \"ebe87d92-7e4e-4963-a2af-320f7b71dec3\" (UID: \"ebe87d92-7e4e-4963-a2af-320f7b71dec3\") " Dec 10 13:39:34 crc kubenswrapper[4921]: I1210 13:39:34.860446 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/ebe87d92-7e4e-4963-a2af-320f7b71dec3-must-gather-output\") pod \"ebe87d92-7e4e-4963-a2af-320f7b71dec3\" (UID: \"ebe87d92-7e4e-4963-a2af-320f7b71dec3\") " Dec 10 13:39:34 crc kubenswrapper[4921]: I1210 13:39:34.866720 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ebe87d92-7e4e-4963-a2af-320f7b71dec3-kube-api-access-rpkd5" (OuterVolumeSpecName: "kube-api-access-rpkd5") pod "ebe87d92-7e4e-4963-a2af-320f7b71dec3" (UID: "ebe87d92-7e4e-4963-a2af-320f7b71dec3"). InnerVolumeSpecName "kube-api-access-rpkd5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 13:39:34 crc kubenswrapper[4921]: I1210 13:39:34.868259 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rpkd5\" (UniqueName: \"kubernetes.io/projected/ebe87d92-7e4e-4963-a2af-320f7b71dec3-kube-api-access-rpkd5\") on node \"crc\" DevicePath \"\"" Dec 10 13:39:34 crc kubenswrapper[4921]: I1210 13:39:34.993527 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ebe87d92-7e4e-4963-a2af-320f7b71dec3-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "ebe87d92-7e4e-4963-a2af-320f7b71dec3" (UID: "ebe87d92-7e4e-4963-a2af-320f7b71dec3"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 13:39:35 crc kubenswrapper[4921]: I1210 13:39:35.071075 4921 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/ebe87d92-7e4e-4963-a2af-320f7b71dec3-must-gather-output\") on node \"crc\" DevicePath \"\"" Dec 10 13:39:35 crc kubenswrapper[4921]: I1210 13:39:35.161894 4921 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-66zf6_must-gather-ln95p_ebe87d92-7e4e-4963-a2af-320f7b71dec3/copy/0.log" Dec 10 13:39:35 crc kubenswrapper[4921]: I1210 13:39:35.162439 4921 generic.go:334] "Generic (PLEG): container finished" podID="ebe87d92-7e4e-4963-a2af-320f7b71dec3" containerID="b76388c563016ad17d394b1f34e18912abda547fbe3e735f4d0bac6cfc7ef140" exitCode=143 Dec 10 13:39:35 crc kubenswrapper[4921]: I1210 13:39:35.162615 4921 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-66zf6/must-gather-ln95p" Dec 10 13:39:35 crc kubenswrapper[4921]: I1210 13:39:35.162638 4921 scope.go:117] "RemoveContainer" containerID="b76388c563016ad17d394b1f34e18912abda547fbe3e735f4d0bac6cfc7ef140" Dec 10 13:39:35 crc kubenswrapper[4921]: I1210 13:39:35.213360 4921 scope.go:117] "RemoveContainer" containerID="27755b22769497f398cb97dd8aca2ca2f919c7254988d33e37624f705f879b62" Dec 10 13:39:35 crc kubenswrapper[4921]: I1210 13:39:35.248669 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ebe87d92-7e4e-4963-a2af-320f7b71dec3" path="/var/lib/kubelet/pods/ebe87d92-7e4e-4963-a2af-320f7b71dec3/volumes" Dec 10 13:39:35 crc kubenswrapper[4921]: I1210 13:39:35.357742 4921 scope.go:117] "RemoveContainer" containerID="b76388c563016ad17d394b1f34e18912abda547fbe3e735f4d0bac6cfc7ef140" Dec 10 13:39:35 crc kubenswrapper[4921]: E1210 13:39:35.360962 4921 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b76388c563016ad17d394b1f34e18912abda547fbe3e735f4d0bac6cfc7ef140\": container with ID starting with b76388c563016ad17d394b1f34e18912abda547fbe3e735f4d0bac6cfc7ef140 not found: ID does not exist" containerID="b76388c563016ad17d394b1f34e18912abda547fbe3e735f4d0bac6cfc7ef140" Dec 10 13:39:35 crc kubenswrapper[4921]: I1210 13:39:35.361112 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b76388c563016ad17d394b1f34e18912abda547fbe3e735f4d0bac6cfc7ef140"} err="failed to get container status \"b76388c563016ad17d394b1f34e18912abda547fbe3e735f4d0bac6cfc7ef140\": rpc error: code = NotFound desc = could not find container \"b76388c563016ad17d394b1f34e18912abda547fbe3e735f4d0bac6cfc7ef140\": container with ID starting with b76388c563016ad17d394b1f34e18912abda547fbe3e735f4d0bac6cfc7ef140 not found: ID does not exist" Dec 10 13:39:35 crc kubenswrapper[4921]: I1210 13:39:35.361217 4921 scope.go:117] "RemoveContainer" containerID="27755b22769497f398cb97dd8aca2ca2f919c7254988d33e37624f705f879b62" Dec 10 13:39:35 crc kubenswrapper[4921]: E1210 13:39:35.361569 4921 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"27755b22769497f398cb97dd8aca2ca2f919c7254988d33e37624f705f879b62\": container with ID starting with 27755b22769497f398cb97dd8aca2ca2f919c7254988d33e37624f705f879b62 not found: ID does not exist" containerID="27755b22769497f398cb97dd8aca2ca2f919c7254988d33e37624f705f879b62" Dec 10 13:39:35 crc kubenswrapper[4921]: I1210 13:39:35.361679 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"27755b22769497f398cb97dd8aca2ca2f919c7254988d33e37624f705f879b62"} err="failed to get container status \"27755b22769497f398cb97dd8aca2ca2f919c7254988d33e37624f705f879b62\": rpc error: code = NotFound desc = could not find container \"27755b22769497f398cb97dd8aca2ca2f919c7254988d33e37624f705f879b62\": container with ID starting with 27755b22769497f398cb97dd8aca2ca2f919c7254988d33e37624f705f879b62 not found: ID does not exist" Dec 10 13:39:42 crc kubenswrapper[4921]: I1210 13:39:42.192977 4921 scope.go:117] "RemoveContainer" containerID="6faab8bcc61171add5c6212849f3f417990397c571c057a5639e195842f4d47e" Dec 10 13:39:42 crc kubenswrapper[4921]: E1210 13:39:42.193640 4921 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 
5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-vn2n6_openshift-machine-config-operator(354355f7-6630-49a8-bdc5-5e875feecb7f)\"" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" podUID="354355f7-6630-49a8-bdc5-5e875feecb7f" Dec 10 13:39:57 crc kubenswrapper[4921]: I1210 13:39:57.193961 4921 scope.go:117] "RemoveContainer" containerID="6faab8bcc61171add5c6212849f3f417990397c571c057a5639e195842f4d47e" Dec 10 13:39:58 crc kubenswrapper[4921]: I1210 13:39:58.376664 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" event={"ID":"354355f7-6630-49a8-bdc5-5e875feecb7f","Type":"ContainerStarted","Data":"4d875f25363b1618acdbe7cd3e7a00722ac8d7beda0229fcd033c23e06946cf7"} Dec 10 13:40:46 crc kubenswrapper[4921]: I1210 13:40:46.835567 4921 scope.go:117] "RemoveContainer" containerID="c2d55db6f56694d5ff5b6ce625d0b6e764d2d56ea2e6acd6fbd7aa23d80588b0" Dec 10 13:41:22 crc kubenswrapper[4921]: I1210 13:41:22.421773 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-97hbb"] Dec 10 13:41:22 crc kubenswrapper[4921]: E1210 13:41:22.422603 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="37e94bc8-669e-4a4d-a9f0-d3106167fe87" containerName="extract-content" Dec 10 13:41:22 crc kubenswrapper[4921]: I1210 13:41:22.422616 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="37e94bc8-669e-4a4d-a9f0-d3106167fe87" containerName="extract-content" Dec 10 13:41:22 crc kubenswrapper[4921]: E1210 13:41:22.422642 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ebe87d92-7e4e-4963-a2af-320f7b71dec3" containerName="gather" Dec 10 13:41:22 crc kubenswrapper[4921]: I1210 13:41:22.422648 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="ebe87d92-7e4e-4963-a2af-320f7b71dec3" containerName="gather" Dec 10 13:41:22 crc kubenswrapper[4921]: E1210 13:41:22.422659 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="37e94bc8-669e-4a4d-a9f0-d3106167fe87" containerName="extract-utilities" Dec 10 13:41:22 crc kubenswrapper[4921]: I1210 13:41:22.422666 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="37e94bc8-669e-4a4d-a9f0-d3106167fe87" containerName="extract-utilities" Dec 10 13:41:22 crc kubenswrapper[4921]: E1210 13:41:22.422682 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ebe87d92-7e4e-4963-a2af-320f7b71dec3" containerName="copy" Dec 10 13:41:22 crc kubenswrapper[4921]: I1210 13:41:22.422688 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="ebe87d92-7e4e-4963-a2af-320f7b71dec3" containerName="copy" Dec 10 13:41:22 crc kubenswrapper[4921]: E1210 13:41:22.422698 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="37e94bc8-669e-4a4d-a9f0-d3106167fe87" containerName="registry-server" Dec 10 13:41:22 crc kubenswrapper[4921]: I1210 13:41:22.422703 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="37e94bc8-669e-4a4d-a9f0-d3106167fe87" containerName="registry-server" Dec 10 13:41:22 crc kubenswrapper[4921]: I1210 13:41:22.422874 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="ebe87d92-7e4e-4963-a2af-320f7b71dec3" containerName="gather" Dec 10 13:41:22 crc kubenswrapper[4921]: I1210 13:41:22.422890 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="37e94bc8-669e-4a4d-a9f0-d3106167fe87" containerName="registry-server" Dec 10 13:41:22 crc kubenswrapper[4921]: I1210 13:41:22.422907 4921 
memory_manager.go:354] "RemoveStaleState removing state" podUID="ebe87d92-7e4e-4963-a2af-320f7b71dec3" containerName="copy" Dec 10 13:41:22 crc kubenswrapper[4921]: I1210 13:41:22.424349 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-97hbb" Dec 10 13:41:22 crc kubenswrapper[4921]: I1210 13:41:22.432199 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-97hbb"] Dec 10 13:41:22 crc kubenswrapper[4921]: I1210 13:41:22.512199 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ndklp\" (UniqueName: \"kubernetes.io/projected/91e7de67-6d75-4b44-b417-ada43686e622-kube-api-access-ndklp\") pod \"redhat-operators-97hbb\" (UID: \"91e7de67-6d75-4b44-b417-ada43686e622\") " pod="openshift-marketplace/redhat-operators-97hbb" Dec 10 13:41:22 crc kubenswrapper[4921]: I1210 13:41:22.512533 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/91e7de67-6d75-4b44-b417-ada43686e622-utilities\") pod \"redhat-operators-97hbb\" (UID: \"91e7de67-6d75-4b44-b417-ada43686e622\") " pod="openshift-marketplace/redhat-operators-97hbb" Dec 10 13:41:22 crc kubenswrapper[4921]: I1210 13:41:22.512668 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/91e7de67-6d75-4b44-b417-ada43686e622-catalog-content\") pod \"redhat-operators-97hbb\" (UID: \"91e7de67-6d75-4b44-b417-ada43686e622\") " pod="openshift-marketplace/redhat-operators-97hbb" Dec 10 13:41:22 crc kubenswrapper[4921]: I1210 13:41:22.613774 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/91e7de67-6d75-4b44-b417-ada43686e622-catalog-content\") pod \"redhat-operators-97hbb\" (UID: \"91e7de67-6d75-4b44-b417-ada43686e622\") " pod="openshift-marketplace/redhat-operators-97hbb" Dec 10 13:41:22 crc kubenswrapper[4921]: I1210 13:41:22.613862 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ndklp\" (UniqueName: \"kubernetes.io/projected/91e7de67-6d75-4b44-b417-ada43686e622-kube-api-access-ndklp\") pod \"redhat-operators-97hbb\" (UID: \"91e7de67-6d75-4b44-b417-ada43686e622\") " pod="openshift-marketplace/redhat-operators-97hbb" Dec 10 13:41:22 crc kubenswrapper[4921]: I1210 13:41:22.613888 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/91e7de67-6d75-4b44-b417-ada43686e622-utilities\") pod \"redhat-operators-97hbb\" (UID: \"91e7de67-6d75-4b44-b417-ada43686e622\") " pod="openshift-marketplace/redhat-operators-97hbb" Dec 10 13:41:22 crc kubenswrapper[4921]: I1210 13:41:22.614361 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/91e7de67-6d75-4b44-b417-ada43686e622-utilities\") pod \"redhat-operators-97hbb\" (UID: \"91e7de67-6d75-4b44-b417-ada43686e622\") " pod="openshift-marketplace/redhat-operators-97hbb" Dec 10 13:41:22 crc kubenswrapper[4921]: I1210 13:41:22.614453 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/91e7de67-6d75-4b44-b417-ada43686e622-catalog-content\") pod \"redhat-operators-97hbb\" (UID: 
\"91e7de67-6d75-4b44-b417-ada43686e622\") " pod="openshift-marketplace/redhat-operators-97hbb" Dec 10 13:41:22 crc kubenswrapper[4921]: I1210 13:41:22.641305 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ndklp\" (UniqueName: \"kubernetes.io/projected/91e7de67-6d75-4b44-b417-ada43686e622-kube-api-access-ndklp\") pod \"redhat-operators-97hbb\" (UID: \"91e7de67-6d75-4b44-b417-ada43686e622\") " pod="openshift-marketplace/redhat-operators-97hbb" Dec 10 13:41:22 crc kubenswrapper[4921]: I1210 13:41:22.774677 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-97hbb" Dec 10 13:41:23 crc kubenswrapper[4921]: I1210 13:41:23.276576 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-97hbb"] Dec 10 13:41:24 crc kubenswrapper[4921]: I1210 13:41:24.073604 4921 generic.go:334] "Generic (PLEG): container finished" podID="91e7de67-6d75-4b44-b417-ada43686e622" containerID="706576c93c993d155f1c7145ef85a1444b153fe02b1e29f9aa511ca650dc5e4a" exitCode=0 Dec 10 13:41:24 crc kubenswrapper[4921]: I1210 13:41:24.073815 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-97hbb" event={"ID":"91e7de67-6d75-4b44-b417-ada43686e622","Type":"ContainerDied","Data":"706576c93c993d155f1c7145ef85a1444b153fe02b1e29f9aa511ca650dc5e4a"} Dec 10 13:41:24 crc kubenswrapper[4921]: I1210 13:41:24.073918 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-97hbb" event={"ID":"91e7de67-6d75-4b44-b417-ada43686e622","Type":"ContainerStarted","Data":"c250f7f665f828dae599c7b4109eec38e3b17c6e9d16604e3ffcb4f0626f3dbd"} Dec 10 13:41:24 crc kubenswrapper[4921]: I1210 13:41:24.078168 4921 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 10 13:41:25 crc kubenswrapper[4921]: I1210 13:41:25.084310 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-97hbb" event={"ID":"91e7de67-6d75-4b44-b417-ada43686e622","Type":"ContainerStarted","Data":"d4c534a027386c8dbec6544f63e0d944caa1da56a062b80c3c6e7863f5f16dcd"} Dec 10 13:41:27 crc kubenswrapper[4921]: I1210 13:41:27.111482 4921 generic.go:334] "Generic (PLEG): container finished" podID="91e7de67-6d75-4b44-b417-ada43686e622" containerID="d4c534a027386c8dbec6544f63e0d944caa1da56a062b80c3c6e7863f5f16dcd" exitCode=0 Dec 10 13:41:27 crc kubenswrapper[4921]: I1210 13:41:27.111571 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-97hbb" event={"ID":"91e7de67-6d75-4b44-b417-ada43686e622","Type":"ContainerDied","Data":"d4c534a027386c8dbec6544f63e0d944caa1da56a062b80c3c6e7863f5f16dcd"} Dec 10 13:41:29 crc kubenswrapper[4921]: I1210 13:41:29.158950 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-97hbb" event={"ID":"91e7de67-6d75-4b44-b417-ada43686e622","Type":"ContainerStarted","Data":"2318892c201748f9900daaff204090c59a8be19932dfe35e73b932c188022c0e"} Dec 10 13:41:29 crc kubenswrapper[4921]: I1210 13:41:29.183496 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-97hbb" podStartSLOduration=3.707603034 podStartE2EDuration="7.183465916s" podCreationTimestamp="2025-12-10 13:41:22 +0000 UTC" firstStartedPulling="2025-12-10 13:41:24.077961376 +0000 UTC m=+2681.294183300" lastFinishedPulling="2025-12-10 
13:41:27.553824258 +0000 UTC m=+2684.770046182" observedRunningTime="2025-12-10 13:41:29.178093022 +0000 UTC m=+2686.394314956" watchObservedRunningTime="2025-12-10 13:41:29.183465916 +0000 UTC m=+2686.399687850" Dec 10 13:41:32 crc kubenswrapper[4921]: I1210 13:41:32.775154 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-97hbb" Dec 10 13:41:32 crc kubenswrapper[4921]: I1210 13:41:32.775520 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-97hbb" Dec 10 13:41:33 crc kubenswrapper[4921]: I1210 13:41:33.817422 4921 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-97hbb" podUID="91e7de67-6d75-4b44-b417-ada43686e622" containerName="registry-server" probeResult="failure" output=< Dec 10 13:41:33 crc kubenswrapper[4921]: timeout: failed to connect service ":50051" within 1s Dec 10 13:41:33 crc kubenswrapper[4921]: > Dec 10 13:41:42 crc kubenswrapper[4921]: I1210 13:41:42.823933 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-97hbb" Dec 10 13:41:42 crc kubenswrapper[4921]: I1210 13:41:42.878791 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-97hbb" Dec 10 13:41:43 crc kubenswrapper[4921]: I1210 13:41:43.054190 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-97hbb"] Dec 10 13:41:44 crc kubenswrapper[4921]: I1210 13:41:44.332522 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-97hbb" podUID="91e7de67-6d75-4b44-b417-ada43686e622" containerName="registry-server" containerID="cri-o://2318892c201748f9900daaff204090c59a8be19932dfe35e73b932c188022c0e" gracePeriod=2 Dec 10 13:41:44 crc kubenswrapper[4921]: I1210 13:41:44.846934 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-97hbb" Dec 10 13:41:45 crc kubenswrapper[4921]: I1210 13:41:45.013653 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/91e7de67-6d75-4b44-b417-ada43686e622-utilities\") pod \"91e7de67-6d75-4b44-b417-ada43686e622\" (UID: \"91e7de67-6d75-4b44-b417-ada43686e622\") " Dec 10 13:41:45 crc kubenswrapper[4921]: I1210 13:41:45.013713 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ndklp\" (UniqueName: \"kubernetes.io/projected/91e7de67-6d75-4b44-b417-ada43686e622-kube-api-access-ndklp\") pod \"91e7de67-6d75-4b44-b417-ada43686e622\" (UID: \"91e7de67-6d75-4b44-b417-ada43686e622\") " Dec 10 13:41:45 crc kubenswrapper[4921]: I1210 13:41:45.013827 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/91e7de67-6d75-4b44-b417-ada43686e622-catalog-content\") pod \"91e7de67-6d75-4b44-b417-ada43686e622\" (UID: \"91e7de67-6d75-4b44-b417-ada43686e622\") " Dec 10 13:41:45 crc kubenswrapper[4921]: I1210 13:41:45.015062 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/91e7de67-6d75-4b44-b417-ada43686e622-utilities" (OuterVolumeSpecName: "utilities") pod "91e7de67-6d75-4b44-b417-ada43686e622" (UID: "91e7de67-6d75-4b44-b417-ada43686e622"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 13:41:45 crc kubenswrapper[4921]: I1210 13:41:45.027621 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/91e7de67-6d75-4b44-b417-ada43686e622-kube-api-access-ndklp" (OuterVolumeSpecName: "kube-api-access-ndklp") pod "91e7de67-6d75-4b44-b417-ada43686e622" (UID: "91e7de67-6d75-4b44-b417-ada43686e622"). InnerVolumeSpecName "kube-api-access-ndklp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 13:41:45 crc kubenswrapper[4921]: I1210 13:41:45.116200 4921 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/91e7de67-6d75-4b44-b417-ada43686e622-utilities\") on node \"crc\" DevicePath \"\"" Dec 10 13:41:45 crc kubenswrapper[4921]: I1210 13:41:45.116236 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ndklp\" (UniqueName: \"kubernetes.io/projected/91e7de67-6d75-4b44-b417-ada43686e622-kube-api-access-ndklp\") on node \"crc\" DevicePath \"\"" Dec 10 13:41:45 crc kubenswrapper[4921]: I1210 13:41:45.133699 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/91e7de67-6d75-4b44-b417-ada43686e622-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "91e7de67-6d75-4b44-b417-ada43686e622" (UID: "91e7de67-6d75-4b44-b417-ada43686e622"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 13:41:45 crc kubenswrapper[4921]: I1210 13:41:45.217766 4921 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/91e7de67-6d75-4b44-b417-ada43686e622-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 10 13:41:45 crc kubenswrapper[4921]: I1210 13:41:45.339683 4921 generic.go:334] "Generic (PLEG): container finished" podID="91e7de67-6d75-4b44-b417-ada43686e622" containerID="2318892c201748f9900daaff204090c59a8be19932dfe35e73b932c188022c0e" exitCode=0 Dec 10 13:41:45 crc kubenswrapper[4921]: I1210 13:41:45.339725 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-97hbb" event={"ID":"91e7de67-6d75-4b44-b417-ada43686e622","Type":"ContainerDied","Data":"2318892c201748f9900daaff204090c59a8be19932dfe35e73b932c188022c0e"} Dec 10 13:41:45 crc kubenswrapper[4921]: I1210 13:41:45.339732 4921 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-97hbb" Dec 10 13:41:45 crc kubenswrapper[4921]: I1210 13:41:45.339750 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-97hbb" event={"ID":"91e7de67-6d75-4b44-b417-ada43686e622","Type":"ContainerDied","Data":"c250f7f665f828dae599c7b4109eec38e3b17c6e9d16604e3ffcb4f0626f3dbd"} Dec 10 13:41:45 crc kubenswrapper[4921]: I1210 13:41:45.339772 4921 scope.go:117] "RemoveContainer" containerID="2318892c201748f9900daaff204090c59a8be19932dfe35e73b932c188022c0e" Dec 10 13:41:45 crc kubenswrapper[4921]: I1210 13:41:45.362921 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-97hbb"] Dec 10 13:41:45 crc kubenswrapper[4921]: I1210 13:41:45.368548 4921 scope.go:117] "RemoveContainer" containerID="d4c534a027386c8dbec6544f63e0d944caa1da56a062b80c3c6e7863f5f16dcd" Dec 10 13:41:45 crc kubenswrapper[4921]: I1210 13:41:45.379993 4921 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-97hbb"] Dec 10 13:41:45 crc kubenswrapper[4921]: I1210 13:41:45.402321 4921 scope.go:117] "RemoveContainer" containerID="706576c93c993d155f1c7145ef85a1444b153fe02b1e29f9aa511ca650dc5e4a" Dec 10 13:41:45 crc kubenswrapper[4921]: I1210 13:41:45.442720 4921 scope.go:117] "RemoveContainer" containerID="2318892c201748f9900daaff204090c59a8be19932dfe35e73b932c188022c0e" Dec 10 13:41:45 crc kubenswrapper[4921]: E1210 13:41:45.443112 4921 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2318892c201748f9900daaff204090c59a8be19932dfe35e73b932c188022c0e\": container with ID starting with 2318892c201748f9900daaff204090c59a8be19932dfe35e73b932c188022c0e not found: ID does not exist" containerID="2318892c201748f9900daaff204090c59a8be19932dfe35e73b932c188022c0e" Dec 10 13:41:45 crc kubenswrapper[4921]: I1210 13:41:45.443152 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2318892c201748f9900daaff204090c59a8be19932dfe35e73b932c188022c0e"} err="failed to get container status \"2318892c201748f9900daaff204090c59a8be19932dfe35e73b932c188022c0e\": rpc error: code = NotFound desc = could not find container \"2318892c201748f9900daaff204090c59a8be19932dfe35e73b932c188022c0e\": container with ID starting with 2318892c201748f9900daaff204090c59a8be19932dfe35e73b932c188022c0e not found: ID does not exist" Dec 10 13:41:45 crc kubenswrapper[4921]: I1210 13:41:45.443176 4921 scope.go:117] "RemoveContainer" containerID="d4c534a027386c8dbec6544f63e0d944caa1da56a062b80c3c6e7863f5f16dcd" Dec 10 13:41:45 crc kubenswrapper[4921]: E1210 13:41:45.443518 4921 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d4c534a027386c8dbec6544f63e0d944caa1da56a062b80c3c6e7863f5f16dcd\": container with ID starting with d4c534a027386c8dbec6544f63e0d944caa1da56a062b80c3c6e7863f5f16dcd not found: ID does not exist" containerID="d4c534a027386c8dbec6544f63e0d944caa1da56a062b80c3c6e7863f5f16dcd" Dec 10 13:41:45 crc kubenswrapper[4921]: I1210 13:41:45.443565 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d4c534a027386c8dbec6544f63e0d944caa1da56a062b80c3c6e7863f5f16dcd"} err="failed to get container status \"d4c534a027386c8dbec6544f63e0d944caa1da56a062b80c3c6e7863f5f16dcd\": rpc error: code = NotFound desc = could not find container 
\"d4c534a027386c8dbec6544f63e0d944caa1da56a062b80c3c6e7863f5f16dcd\": container with ID starting with d4c534a027386c8dbec6544f63e0d944caa1da56a062b80c3c6e7863f5f16dcd not found: ID does not exist" Dec 10 13:41:45 crc kubenswrapper[4921]: I1210 13:41:45.443592 4921 scope.go:117] "RemoveContainer" containerID="706576c93c993d155f1c7145ef85a1444b153fe02b1e29f9aa511ca650dc5e4a" Dec 10 13:41:45 crc kubenswrapper[4921]: E1210 13:41:45.443853 4921 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"706576c93c993d155f1c7145ef85a1444b153fe02b1e29f9aa511ca650dc5e4a\": container with ID starting with 706576c93c993d155f1c7145ef85a1444b153fe02b1e29f9aa511ca650dc5e4a not found: ID does not exist" containerID="706576c93c993d155f1c7145ef85a1444b153fe02b1e29f9aa511ca650dc5e4a" Dec 10 13:41:45 crc kubenswrapper[4921]: I1210 13:41:45.443879 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"706576c93c993d155f1c7145ef85a1444b153fe02b1e29f9aa511ca650dc5e4a"} err="failed to get container status \"706576c93c993d155f1c7145ef85a1444b153fe02b1e29f9aa511ca650dc5e4a\": rpc error: code = NotFound desc = could not find container \"706576c93c993d155f1c7145ef85a1444b153fe02b1e29f9aa511ca650dc5e4a\": container with ID starting with 706576c93c993d155f1c7145ef85a1444b153fe02b1e29f9aa511ca650dc5e4a not found: ID does not exist" Dec 10 13:41:47 crc kubenswrapper[4921]: I1210 13:41:47.203027 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="91e7de67-6d75-4b44-b417-ada43686e622" path="/var/lib/kubelet/pods/91e7de67-6d75-4b44-b417-ada43686e622/volumes" Dec 10 13:42:16 crc kubenswrapper[4921]: I1210 13:42:16.712947 4921 patch_prober.go:28] interesting pod/machine-config-daemon-vn2n6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 10 13:42:16 crc kubenswrapper[4921]: I1210 13:42:16.713507 4921 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" podUID="354355f7-6630-49a8-bdc5-5e875feecb7f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 10 13:42:46 crc kubenswrapper[4921]: I1210 13:42:46.711146 4921 patch_prober.go:28] interesting pod/machine-config-daemon-vn2n6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 10 13:42:46 crc kubenswrapper[4921]: I1210 13:42:46.711764 4921 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" podUID="354355f7-6630-49a8-bdc5-5e875feecb7f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 10 13:43:15 crc kubenswrapper[4921]: I1210 13:43:15.016876 4921 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-kq5qq"] Dec 10 13:43:15 crc kubenswrapper[4921]: E1210 13:43:15.025912 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="91e7de67-6d75-4b44-b417-ada43686e622" containerName="registry-server" 
Dec 10 13:43:15 crc kubenswrapper[4921]: I1210 13:43:15.026048 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="91e7de67-6d75-4b44-b417-ada43686e622" containerName="registry-server"
Dec 10 13:43:15 crc kubenswrapper[4921]: E1210 13:43:15.026140 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="91e7de67-6d75-4b44-b417-ada43686e622" containerName="extract-content"
Dec 10 13:43:15 crc kubenswrapper[4921]: I1210 13:43:15.026202 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="91e7de67-6d75-4b44-b417-ada43686e622" containerName="extract-content"
Dec 10 13:43:15 crc kubenswrapper[4921]: E1210 13:43:15.026285 4921 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="91e7de67-6d75-4b44-b417-ada43686e622" containerName="extract-utilities"
Dec 10 13:43:15 crc kubenswrapper[4921]: I1210 13:43:15.026349 4921 state_mem.go:107] "Deleted CPUSet assignment" podUID="91e7de67-6d75-4b44-b417-ada43686e622" containerName="extract-utilities"
Dec 10 13:43:15 crc kubenswrapper[4921]: I1210 13:43:15.026641 4921 memory_manager.go:354] "RemoveStaleState removing state" podUID="91e7de67-6d75-4b44-b417-ada43686e622" containerName="registry-server"
Dec 10 13:43:15 crc kubenswrapper[4921]: I1210 13:43:15.027954 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-kq5qq"
Dec 10 13:43:15 crc kubenswrapper[4921]: I1210 13:43:15.038465 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-kq5qq"]
Dec 10 13:43:15 crc kubenswrapper[4921]: I1210 13:43:15.161335 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ab862616-7c6c-4093-923b-0ced7d8eaf49-utilities\") pod \"community-operators-kq5qq\" (UID: \"ab862616-7c6c-4093-923b-0ced7d8eaf49\") " pod="openshift-marketplace/community-operators-kq5qq"
Dec 10 13:43:15 crc kubenswrapper[4921]: I1210 13:43:15.161770 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ab862616-7c6c-4093-923b-0ced7d8eaf49-catalog-content\") pod \"community-operators-kq5qq\" (UID: \"ab862616-7c6c-4093-923b-0ced7d8eaf49\") " pod="openshift-marketplace/community-operators-kq5qq"
Dec 10 13:43:15 crc kubenswrapper[4921]: I1210 13:43:15.161806 4921 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-569kk\" (UniqueName: \"kubernetes.io/projected/ab862616-7c6c-4093-923b-0ced7d8eaf49-kube-api-access-569kk\") pod \"community-operators-kq5qq\" (UID: \"ab862616-7c6c-4093-923b-0ced7d8eaf49\") " pod="openshift-marketplace/community-operators-kq5qq"
Dec 10 13:43:15 crc kubenswrapper[4921]: I1210 13:43:15.264257 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ab862616-7c6c-4093-923b-0ced7d8eaf49-utilities\") pod \"community-operators-kq5qq\" (UID: \"ab862616-7c6c-4093-923b-0ced7d8eaf49\") " pod="openshift-marketplace/community-operators-kq5qq"
Dec 10 13:43:15 crc kubenswrapper[4921]: I1210 13:43:15.264736 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ab862616-7c6c-4093-923b-0ced7d8eaf49-utilities\") pod \"community-operators-kq5qq\" (UID: \"ab862616-7c6c-4093-923b-0ced7d8eaf49\") " pod="openshift-marketplace/community-operators-kq5qq"
Dec 10 13:43:15 crc kubenswrapper[4921]: I1210 13:43:15.264957 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ab862616-7c6c-4093-923b-0ced7d8eaf49-catalog-content\") pod \"community-operators-kq5qq\" (UID: \"ab862616-7c6c-4093-923b-0ced7d8eaf49\") " pod="openshift-marketplace/community-operators-kq5qq"
Dec 10 13:43:15 crc kubenswrapper[4921]: I1210 13:43:15.264988 4921 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-569kk\" (UniqueName: \"kubernetes.io/projected/ab862616-7c6c-4093-923b-0ced7d8eaf49-kube-api-access-569kk\") pod \"community-operators-kq5qq\" (UID: \"ab862616-7c6c-4093-923b-0ced7d8eaf49\") " pod="openshift-marketplace/community-operators-kq5qq"
Dec 10 13:43:15 crc kubenswrapper[4921]: I1210 13:43:15.265211 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ab862616-7c6c-4093-923b-0ced7d8eaf49-catalog-content\") pod \"community-operators-kq5qq\" (UID: \"ab862616-7c6c-4093-923b-0ced7d8eaf49\") " pod="openshift-marketplace/community-operators-kq5qq"
Dec 10 13:43:15 crc kubenswrapper[4921]: I1210 13:43:15.288967 4921 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-569kk\" (UniqueName: \"kubernetes.io/projected/ab862616-7c6c-4093-923b-0ced7d8eaf49-kube-api-access-569kk\") pod \"community-operators-kq5qq\" (UID: \"ab862616-7c6c-4093-923b-0ced7d8eaf49\") " pod="openshift-marketplace/community-operators-kq5qq"
Dec 10 13:43:15 crc kubenswrapper[4921]: I1210 13:43:15.381786 4921 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-kq5qq"
Dec 10 13:43:15 crc kubenswrapper[4921]: I1210 13:43:15.964294 4921 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-kq5qq"]
Dec 10 13:43:16 crc kubenswrapper[4921]: I1210 13:43:16.073451 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kq5qq" event={"ID":"ab862616-7c6c-4093-923b-0ced7d8eaf49","Type":"ContainerStarted","Data":"c6ffd5dadb5a6dd1d8787cb85ffc4b9610d033490a633445efd560f5de158828"}
Dec 10 13:43:16 crc kubenswrapper[4921]: I1210 13:43:16.710590 4921 patch_prober.go:28] interesting pod/machine-config-daemon-vn2n6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 10 13:43:16 crc kubenswrapper[4921]: I1210 13:43:16.711202 4921 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" podUID="354355f7-6630-49a8-bdc5-5e875feecb7f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 10 13:43:16 crc kubenswrapper[4921]: I1210 13:43:16.711326 4921 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6"
Dec 10 13:43:16 crc kubenswrapper[4921]: I1210 13:43:16.712075 4921 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"4d875f25363b1618acdbe7cd3e7a00722ac8d7beda0229fcd033c23e06946cf7"} pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 10 13:43:16 crc kubenswrapper[4921]: I1210 13:43:16.712201 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" podUID="354355f7-6630-49a8-bdc5-5e875feecb7f" containerName="machine-config-daemon" containerID="cri-o://4d875f25363b1618acdbe7cd3e7a00722ac8d7beda0229fcd033c23e06946cf7" gracePeriod=600
Dec 10 13:43:17 crc kubenswrapper[4921]: I1210 13:43:17.083009 4921 generic.go:334] "Generic (PLEG): container finished" podID="ab862616-7c6c-4093-923b-0ced7d8eaf49" containerID="eab207f0abbf994bb26f8ba2fe7b95a74cd15a80789c17b42fd96d78c85c0de3" exitCode=0
Dec 10 13:43:17 crc kubenswrapper[4921]: I1210 13:43:17.083049 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kq5qq" event={"ID":"ab862616-7c6c-4093-923b-0ced7d8eaf49","Type":"ContainerDied","Data":"eab207f0abbf994bb26f8ba2fe7b95a74cd15a80789c17b42fd96d78c85c0de3"}
Dec 10 13:43:17 crc kubenswrapper[4921]: I1210 13:43:17.092022 4921 generic.go:334] "Generic (PLEG): container finished" podID="354355f7-6630-49a8-bdc5-5e875feecb7f" containerID="4d875f25363b1618acdbe7cd3e7a00722ac8d7beda0229fcd033c23e06946cf7" exitCode=0
Dec 10 13:43:17 crc kubenswrapper[4921]: I1210 13:43:17.092083 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" event={"ID":"354355f7-6630-49a8-bdc5-5e875feecb7f","Type":"ContainerDied","Data":"4d875f25363b1618acdbe7cd3e7a00722ac8d7beda0229fcd033c23e06946cf7"}
Dec 10 13:43:17 crc kubenswrapper[4921]: I1210 13:43:17.092113 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-vn2n6" event={"ID":"354355f7-6630-49a8-bdc5-5e875feecb7f","Type":"ContainerStarted","Data":"b99d8d02ea2696b448747466501a00d11e7a0ee373798d06d07b658cb3d178a6"}
Dec 10 13:43:17 crc kubenswrapper[4921]: I1210 13:43:17.092150 4921 scope.go:117] "RemoveContainer" containerID="6faab8bcc61171add5c6212849f3f417990397c571c057a5639e195842f4d47e"
Dec 10 13:43:18 crc kubenswrapper[4921]: I1210 13:43:18.122917 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kq5qq" event={"ID":"ab862616-7c6c-4093-923b-0ced7d8eaf49","Type":"ContainerStarted","Data":"db8275845cb331a4d3f701b026f6b8463c3787eb65cc930c3eeee1741b47135b"}
Dec 10 13:43:19 crc kubenswrapper[4921]: I1210 13:43:19.179696 4921 generic.go:334] "Generic (PLEG): container finished" podID="ab862616-7c6c-4093-923b-0ced7d8eaf49" containerID="db8275845cb331a4d3f701b026f6b8463c3787eb65cc930c3eeee1741b47135b" exitCode=0
Dec 10 13:43:19 crc kubenswrapper[4921]: I1210 13:43:19.180044 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kq5qq" event={"ID":"ab862616-7c6c-4093-923b-0ced7d8eaf49","Type":"ContainerDied","Data":"db8275845cb331a4d3f701b026f6b8463c3787eb65cc930c3eeee1741b47135b"}
Dec 10 13:43:20 crc kubenswrapper[4921]: I1210 13:43:20.190100 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kq5qq" event={"ID":"ab862616-7c6c-4093-923b-0ced7d8eaf49","Type":"ContainerStarted","Data":"8af30f5c64764a2968fe137583bc75c590a57ba262da60f2aebc1db1ff2ec916"}
Dec 10 13:43:20 crc kubenswrapper[4921]: I1210 13:43:20.211817 4921 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-kq5qq" podStartSLOduration=3.482030872 podStartE2EDuration="6.21179627s" podCreationTimestamp="2025-12-10 13:43:14 +0000 UTC" firstStartedPulling="2025-12-10 13:43:17.084901129 +0000 UTC m=+2794.301123053" lastFinishedPulling="2025-12-10 13:43:19.814666527 +0000 UTC m=+2797.030888451" observedRunningTime="2025-12-10 13:43:20.205915722 +0000 UTC m=+2797.422137676" watchObservedRunningTime="2025-12-10 13:43:20.21179627 +0000 UTC m=+2797.428018214"
Dec 10 13:43:25 crc kubenswrapper[4921]: I1210 13:43:25.383643 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-kq5qq"
Dec 10 13:43:25 crc kubenswrapper[4921]: I1210 13:43:25.384090 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-kq5qq"
Dec 10 13:43:25 crc kubenswrapper[4921]: I1210 13:43:25.430897 4921 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-kq5qq"
Dec 10 13:43:26 crc kubenswrapper[4921]: I1210 13:43:26.303732 4921 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-kq5qq"
Dec 10 13:43:26 crc kubenswrapper[4921]: I1210 13:43:26.368652 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-kq5qq"]
Dec 10 13:43:28 crc kubenswrapper[4921]: I1210 13:43:28.270931 4921 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-kq5qq" podUID="ab862616-7c6c-4093-923b-0ced7d8eaf49" containerName="registry-server" containerID="cri-o://8af30f5c64764a2968fe137583bc75c590a57ba262da60f2aebc1db1ff2ec916" gracePeriod=2
Dec 10 13:43:29 crc kubenswrapper[4921]: I1210 13:43:29.208938 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-kq5qq"
Dec 10 13:43:29 crc kubenswrapper[4921]: I1210 13:43:29.279169 4921 generic.go:334] "Generic (PLEG): container finished" podID="ab862616-7c6c-4093-923b-0ced7d8eaf49" containerID="8af30f5c64764a2968fe137583bc75c590a57ba262da60f2aebc1db1ff2ec916" exitCode=0
Dec 10 13:43:29 crc kubenswrapper[4921]: I1210 13:43:29.279220 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kq5qq" event={"ID":"ab862616-7c6c-4093-923b-0ced7d8eaf49","Type":"ContainerDied","Data":"8af30f5c64764a2968fe137583bc75c590a57ba262da60f2aebc1db1ff2ec916"}
Dec 10 13:43:29 crc kubenswrapper[4921]: I1210 13:43:29.279241 4921 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-kq5qq"
Dec 10 13:43:29 crc kubenswrapper[4921]: I1210 13:43:29.279266 4921 scope.go:117] "RemoveContainer" containerID="8af30f5c64764a2968fe137583bc75c590a57ba262da60f2aebc1db1ff2ec916"
Dec 10 13:43:29 crc kubenswrapper[4921]: I1210 13:43:29.279253 4921 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kq5qq" event={"ID":"ab862616-7c6c-4093-923b-0ced7d8eaf49","Type":"ContainerDied","Data":"c6ffd5dadb5a6dd1d8787cb85ffc4b9610d033490a633445efd560f5de158828"}
Dec 10 13:43:29 crc kubenswrapper[4921]: I1210 13:43:29.296618 4921 scope.go:117] "RemoveContainer" containerID="db8275845cb331a4d3f701b026f6b8463c3787eb65cc930c3eeee1741b47135b"
Dec 10 13:43:29 crc kubenswrapper[4921]: I1210 13:43:29.318631 4921 scope.go:117] "RemoveContainer" containerID="eab207f0abbf994bb26f8ba2fe7b95a74cd15a80789c17b42fd96d78c85c0de3"
Dec 10 13:43:29 crc kubenswrapper[4921]: I1210 13:43:29.320965 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ab862616-7c6c-4093-923b-0ced7d8eaf49-catalog-content\") pod \"ab862616-7c6c-4093-923b-0ced7d8eaf49\" (UID: \"ab862616-7c6c-4093-923b-0ced7d8eaf49\") "
Dec 10 13:43:29 crc kubenswrapper[4921]: I1210 13:43:29.321085 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-569kk\" (UniqueName: \"kubernetes.io/projected/ab862616-7c6c-4093-923b-0ced7d8eaf49-kube-api-access-569kk\") pod \"ab862616-7c6c-4093-923b-0ced7d8eaf49\" (UID: \"ab862616-7c6c-4093-923b-0ced7d8eaf49\") "
Dec 10 13:43:29 crc kubenswrapper[4921]: I1210 13:43:29.321159 4921 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ab862616-7c6c-4093-923b-0ced7d8eaf49-utilities\") pod \"ab862616-7c6c-4093-923b-0ced7d8eaf49\" (UID: \"ab862616-7c6c-4093-923b-0ced7d8eaf49\") "
Dec 10 13:43:29 crc kubenswrapper[4921]: I1210 13:43:29.325478 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ab862616-7c6c-4093-923b-0ced7d8eaf49-utilities" (OuterVolumeSpecName: "utilities") pod "ab862616-7c6c-4093-923b-0ced7d8eaf49" (UID: "ab862616-7c6c-4093-923b-0ced7d8eaf49"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 10 13:43:29 crc kubenswrapper[4921]: I1210 13:43:29.329606 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ab862616-7c6c-4093-923b-0ced7d8eaf49-kube-api-access-569kk" (OuterVolumeSpecName: "kube-api-access-569kk") pod "ab862616-7c6c-4093-923b-0ced7d8eaf49" (UID: "ab862616-7c6c-4093-923b-0ced7d8eaf49"). InnerVolumeSpecName "kube-api-access-569kk". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 10 13:43:29 crc kubenswrapper[4921]: I1210 13:43:29.372818 4921 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ab862616-7c6c-4093-923b-0ced7d8eaf49-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ab862616-7c6c-4093-923b-0ced7d8eaf49" (UID: "ab862616-7c6c-4093-923b-0ced7d8eaf49"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 10 13:43:29 crc kubenswrapper[4921]: I1210 13:43:29.396675 4921 scope.go:117] "RemoveContainer" containerID="8af30f5c64764a2968fe137583bc75c590a57ba262da60f2aebc1db1ff2ec916"
Dec 10 13:43:29 crc kubenswrapper[4921]: E1210 13:43:29.397155 4921 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8af30f5c64764a2968fe137583bc75c590a57ba262da60f2aebc1db1ff2ec916\": container with ID starting with 8af30f5c64764a2968fe137583bc75c590a57ba262da60f2aebc1db1ff2ec916 not found: ID does not exist" containerID="8af30f5c64764a2968fe137583bc75c590a57ba262da60f2aebc1db1ff2ec916"
Dec 10 13:43:29 crc kubenswrapper[4921]: I1210 13:43:29.397205 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8af30f5c64764a2968fe137583bc75c590a57ba262da60f2aebc1db1ff2ec916"} err="failed to get container status \"8af30f5c64764a2968fe137583bc75c590a57ba262da60f2aebc1db1ff2ec916\": rpc error: code = NotFound desc = could not find container \"8af30f5c64764a2968fe137583bc75c590a57ba262da60f2aebc1db1ff2ec916\": container with ID starting with 8af30f5c64764a2968fe137583bc75c590a57ba262da60f2aebc1db1ff2ec916 not found: ID does not exist"
Dec 10 13:43:29 crc kubenswrapper[4921]: I1210 13:43:29.397232 4921 scope.go:117] "RemoveContainer" containerID="db8275845cb331a4d3f701b026f6b8463c3787eb65cc930c3eeee1741b47135b"
Dec 10 13:43:29 crc kubenswrapper[4921]: E1210 13:43:29.397652 4921 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"db8275845cb331a4d3f701b026f6b8463c3787eb65cc930c3eeee1741b47135b\": container with ID starting with db8275845cb331a4d3f701b026f6b8463c3787eb65cc930c3eeee1741b47135b not found: ID does not exist" containerID="db8275845cb331a4d3f701b026f6b8463c3787eb65cc930c3eeee1741b47135b"
Dec 10 13:43:29 crc kubenswrapper[4921]: I1210 13:43:29.397681 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"db8275845cb331a4d3f701b026f6b8463c3787eb65cc930c3eeee1741b47135b"} err="failed to get container status \"db8275845cb331a4d3f701b026f6b8463c3787eb65cc930c3eeee1741b47135b\": rpc error: code = NotFound desc = could not find container \"db8275845cb331a4d3f701b026f6b8463c3787eb65cc930c3eeee1741b47135b\": container with ID starting with db8275845cb331a4d3f701b026f6b8463c3787eb65cc930c3eeee1741b47135b not found: ID does not exist"
Dec 10 13:43:29 crc kubenswrapper[4921]: I1210 13:43:29.397713 4921 scope.go:117] "RemoveContainer" containerID="eab207f0abbf994bb26f8ba2fe7b95a74cd15a80789c17b42fd96d78c85c0de3"
Dec 10 13:43:29 crc kubenswrapper[4921]: E1210 13:43:29.397916 4921 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eab207f0abbf994bb26f8ba2fe7b95a74cd15a80789c17b42fd96d78c85c0de3\": container with ID starting with eab207f0abbf994bb26f8ba2fe7b95a74cd15a80789c17b42fd96d78c85c0de3 not found: ID does not exist" containerID="eab207f0abbf994bb26f8ba2fe7b95a74cd15a80789c17b42fd96d78c85c0de3"
Dec 10 13:43:29 crc kubenswrapper[4921]: I1210 13:43:29.397956 4921 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eab207f0abbf994bb26f8ba2fe7b95a74cd15a80789c17b42fd96d78c85c0de3"} err="failed to get container status \"eab207f0abbf994bb26f8ba2fe7b95a74cd15a80789c17b42fd96d78c85c0de3\": rpc error: code = NotFound desc = could not find container \"eab207f0abbf994bb26f8ba2fe7b95a74cd15a80789c17b42fd96d78c85c0de3\": container with ID starting with eab207f0abbf994bb26f8ba2fe7b95a74cd15a80789c17b42fd96d78c85c0de3 not found: ID does not exist"
Dec 10 13:43:29 crc kubenswrapper[4921]: I1210 13:43:29.425168 4921 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ab862616-7c6c-4093-923b-0ced7d8eaf49-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 10 13:43:29 crc kubenswrapper[4921]: I1210 13:43:29.425205 4921 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-569kk\" (UniqueName: \"kubernetes.io/projected/ab862616-7c6c-4093-923b-0ced7d8eaf49-kube-api-access-569kk\") on node \"crc\" DevicePath \"\""
Dec 10 13:43:29 crc kubenswrapper[4921]: I1210 13:43:29.425219 4921 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ab862616-7c6c-4093-923b-0ced7d8eaf49-utilities\") on node \"crc\" DevicePath \"\""
Dec 10 13:43:29 crc kubenswrapper[4921]: I1210 13:43:29.620489 4921 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-kq5qq"]
Dec 10 13:43:29 crc kubenswrapper[4921]: I1210 13:43:29.628316 4921 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-kq5qq"]
Dec 10 13:43:31 crc kubenswrapper[4921]: I1210 13:43:31.202109 4921 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ab862616-7c6c-4093-923b-0ced7d8eaf49" path="/var/lib/kubelet/pods/ab862616-7c6c-4093-923b-0ced7d8eaf49/volumes"
var/home/core/zuul-output/logs/crc-cloud-workdir-crc-all-logs.tar.gz0000644000175000000000000000005515116274446024457 0ustar coreroot
var/home/core/zuul-output/logs/crc-cloud/0000755000175000000000000000000015116274447017375 5ustar coreroot
var/home/core/zuul-output/artifacts/0000755000175000017500000000000015116266323016512 5ustar corecore
var/home/core/zuul-output/docs/0000755000175000017500000000000015116266324015463 5ustar corecore